Thursday, January 31, 2013

Spring Batch and Scheduler Integration

1) configure scheduler


<!-- configure scheduler -->
<!-- Set launcher properties -->
<bean id="pubLauncher" class="com.abc.PublicCompanyLauncher">
<!-- Job to execute -->
<property name="job" ref="publicCompanyJob" />
<!-- Job launcher defined in job repository -->
<property name="jobLauncher" ref="jobLauncher" />
</bean>

<!-- Scheduler definition -->
<task:scheduler id="pubScheduler" pool-size="3" />

<!-- Task definition to be scheduled -->
<task:scheduled-tasks scheduler="pubScheduler">
<!-- Scheduler will call method 'launch' every 10 seconds -->
<task:scheduled ref="pubLauncher" method="launch"
fixed-rate="10000" />
</task:scheduled-tasks>

2) Launcher class


package com.abc;

import java.io.File;
import java.util.HashMap;
import java.util.Map;

import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameter;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
import org.springframework.batch.core.repository.JobRestartException;

public class PublicCompanyLauncher {

private Job job;
private JobLauncher jobLauncher;

/**
* This method is called by the Spring scheduler at every fixed-rate interval.
*/
public void launch() {

// Create parameters for job.
JobParameters jobParams = createJobParameters();

if (jobParams == null || jobParams.isEmpty()) {
System.out.println("nothing to do........................");
return;
}

// Run job.
try {
JobExecution result = getJobLauncher().run(getJob(), jobParams);
System.out.println("job execution: " + result);
} catch (JobExecutionAlreadyRunningException e) {
e.printStackTrace();
} catch (JobRestartException e) {
e.printStackTrace();
} catch (JobInstanceAlreadyCompleteException e) {
e.printStackTrace();
} catch (JobParametersInvalidException e) {
e.printStackTrace();
}

}

private JobParameters createJobParameters() {
// take only one .txt file in the folder to process
File dir = new File("d:/temp/test");
JobParameters jobParameters = null;
for (File f : dir.listFiles()) {
System.out.println("======================= " + f.getName());
if (!f.getName().endsWith(".txt"))
continue;
JobParameter jp = new JobParameter(f.getAbsolutePath());
Map<String, JobParameter> map = new HashMap<String, JobParameter>();
map.put("inputFile", jp);
jobParameters = new JobParameters(map);
break;
}

return jobParameters;
}

public void setJob(Job job) {
this.job = job;
}

public Job getJob() {
return job;
}

public void setJobLauncher(JobLauncher jobLauncher) {
this.jobLauncher = jobLauncher;
}

public JobLauncher getJobLauncher() {
return jobLauncher;
}
}

3) Main method

ClassPathXmlApplicationContext context = new ClassPathXmlApplicationContext(
"spring-context-jobs.xml");
context.start();


Spring Batch - read multiple files/process/write

This is a demo that uses the Spring Batch framework to read multiple files, process them, and write the results to other files or a database.

1) The starting point of the batch is the launcher, which is used to launch a job


public void launch() {
ApplicationContext context = new ClassPathXmlApplicationContext(
"spring-batch-job-publiccompany.xml");
JobLauncher launcher = (JobLauncher) context.getBean("jobLauncher");
Job job = (Job) context.getBean("publicCompanyJob");

try {
JobExecution result = launcher.run(job, new JobParameters());
System.out.println("->>---" + result.toString());
} catch (Exception e) {
e.printStackTrace();
}
}

2) Take a look at the job settings.

This is the content of spring-batch-job-publiccompany.xml


<?xml version="1.0" encoding="UTF-8"?>
<beans:beans xmlns="http://www.springframework.org/schema/batch"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:p="http://www.springframework.org/schema/p"
xmlns:beans="http://www.springframework.org/schema/beans"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans.xsd
http://www.springframework.org/schema/batch
http://www.springframework.org/schema/batch/spring-batch.xsd">

<beans:import resource="classpath:spring-batch.xml" />

<job id="publicCompanyJob" restartable="true">
<step id="readFile">
<tasklet transaction-manager="transactionManager"
start-limit="1" allow-start-if-complete="false">
<chunk reader="multifileReader" processor="publicCompanyItemProcessor"
writer="publicCompanyItemWriter" commit-interval="1">
</chunk>
</tasklet>
</step>
<!-- <step id="saveData"> </step> -->

<listeners>
<listener ref="publicCompanyJobListener" />
</listeners>
</job>

<beans:bean id="multifileReader"
class="org.springframework.batch.item.file.MultiResourceItemReader"
scope="step" lazy-init="true">
<beans:property name="resources" value="file://d:/temp/test/*.txt" />
<beans:property name="delegate" ref="publicCompanyItemReader" />
</beans:bean>

<beans:bean id="publicCompanyItemReader"
class="org.springframework.batch.item.file.FlatFileItemReader" scope="step">
<beans:property name="resource" ref="inputFile"/>
<beans:property name="linesToSkip" value="1" />
<beans:property name="lineMapper">
<beans:bean
class="org.springframework.batch.item.file.mapping.DefaultLineMapper">
<beans:property name="lineTokenizer" ref="lineTokenizer" />
<beans:property name="fieldSetMapper">
<beans:bean
class="org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper">
<beans:property name="prototypeBeanName" value="publicCompanyInfo"></beans:property>
</beans:bean>
</beans:property>
</beans:bean>
</beans:property>
</beans:bean>

<beans:bean id="lineTokenizer"
class="org.springframework.batch.item.file.transform.DelimitedLineTokenizer"
p:names="publicCompanyId,publicCompanyName" p:delimiter="," />

</beans:beans>
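
The BeanWrapperFieldSetMapper above maps each delimited line onto a prototype bean named publicCompanyInfo, which is not shown in this post. Below is a minimal sketch of what that bean could look like, assuming it only carries the two tokenized fields; the @Component name and prototype @Scope are my assumption, and an equivalent XML prototype bean definition would work just as well.

package com.abc;

import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;

// Hypothetical domain bean backing the "publicCompanyInfo" prototype;
// the field names must match the tokenizer's column names.
@Component("publicCompanyInfo")
@Scope("prototype")
public class PublicCompanyInfo {

    private String publicCompanyId;
    private String publicCompanyName;

    public String getPublicCompanyId() {
        return publicCompanyId;
    }

    public void setPublicCompanyId(String publicCompanyId) {
        this.publicCompanyId = publicCompanyId;
    }

    public String getPublicCompanyName() {
        return publicCompanyName;
    }

    public void setPublicCompanyName(String publicCompanyName) {
        this.publicCompanyName = publicCompanyName;
    }

    @Override
    public String toString() {
        return "PublicCompanyInfo [publicCompanyId=" + publicCompanyId
                + ", publicCompanyName=" + publicCompanyName + "]";
    }
}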

3) spring-batch.xml


<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans.xsd">

<import resource="classpath:spring-context-jobs.xml" />

<bean id="jobLauncher"
class="org.springframework.batch.core.launch.support.SimpleJobLauncher">
<property name="jobRepository" ref="jobRepository" />
</bean>
        <!--  use in-memory db, change to actual db if need -->
<bean id="jobRepository"
class="org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean">
</bean>
        <!-- use ResourcelessTransactionManager if using in-memory db  -->
<bean id="transactionManager"
class="org.springframework.batch.support.transaction.ResourcelessTransactionManager" />
</beans>

4) processor class


@Component
public class PublicCompanyItemProcessor implements ItemProcessor<PublicCompanyInfo, PublicCompanyInfo> {

public PublicCompanyInfo process(PublicCompanyInfo info) throws Exception {
System.out.println("processing info:"+info);

return info;

}
}

5) writer class


@Component
public class PublicCompanyItemWriter implements ItemWriter<PublicCompanyInfo> {

public void write(List<? extends PublicCompanyInfo> items) throws Exception {

               // write a list of beans to DB or file
}
}

So all the files ending with .txt in the folder d:/temp/test will be read and processed. However, it is hard to handle the case where we want to rename each file after it is processed and then go on to process the next file. To achieve this, we need to make the following changes:

1) Change the launcher to find the files in the folder, then launch a job for each file


public void launch() {
ApplicationContext context = new ClassPathXmlApplicationContext(
"spring-batch-job-publiccompany.xml");
JobLauncher launcher = (JobLauncher) context.getBean("jobLauncher");
Job job = (Job) context.getBean("publicCompanyJob");

try {

File dir = new File("d:/temp/test");
for (File f : dir.listFiles()) {
JobParameter jp = new JobParameter(f.getAbsolutePath());
Map<String, JobParameter> map = new HashMap<String, JobParameter>();
map.put("inputFile", jp);  // need to configure in job settings
JobExecution result = launcher.run(job, new JobParameters(map));

System.out.println("->>---" + result.toString());
}
} catch (Exception e) {
e.printStackTrace();
}
}



2) Change the job settings

<?xml version="1.0" encoding="UTF-8"?>
<beans:beans xmlns="http://www.springframework.org/schema/batch"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:p="http://www.springframework.org/schema/p"
xmlns:beans="http://www.springframework.org/schema/beans"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans.xsd
http://www.springframework.org/schema/batch
http://www.springframework.org/schema/batch/spring-batch.xsd">

<beans:import resource="classpath:spring-batch.xml" />

<job id="publicCompanyJob" restartable="true">
<step id="readFile">
<tasklet transaction-manager="transactionManager"
start-limit="1" allow-start-if-complete="false">
<chunk reader="publicCompanyItemReader" processor="publicCompanyItemProcessor"
writer="publicCompanyItemWriter" commit-interval="1">
</chunk>
</tasklet>
</step>
<!-- <step id="saveData"> </step> -->

<listeners>
<listener ref="publicCompanyJobListener" />
</listeners>
</job>

<beans:bean id="publicCompanyItemReader"
class="org.springframework.batch.item.file.FlatFileItemReader" scope="step">
<beans:property name="resource" ref="inputFile"/>
<beans:property name="linesToSkip" value="1" />
<beans:property name="lineMapper">
<beans:bean
class="org.springframework.batch.item.file.mapping.DefaultLineMapper">
<beans:property name="lineTokenizer" ref="lineTokenizer" />
<beans:property name="fieldSetMapper">
<beans:bean
class="org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper">
<beans:property name="prototypeBeanName" value="publicCompanyInfo"></beans:property>
</beans:bean>
</beans:property>
</beans:bean>
</beans:property>
</beans:bean>

<beans:bean id="lineTokenizer"
class="org.springframework.batch.item.file.transform.DelimitedLineTokenizer"
p:names="publicCompanyId,publicCompanyName" p:delimiter="," />

<beans:bean id="inputFile"
class="org.springframework.core.io.FileSystemResource" scope="step">
<beans:constructor-arg value="#{jobParameters['inputFile']}" />
</beans:bean>

</beans:beans>

3) Add a listener that renames the processed file after the job completes

package abc;

import java.io.File;

import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.annotation.AfterJob;
import org.springframework.batch.core.annotation.BeforeJob;
import org.springframework.stereotype.Component;


@Component
public class PublicCompanyJobListener extends ServiceBase {

@AfterJob
public void afterJob(JobExecution jobExecution) {
if (jobExecution.getStatus() == BatchStatus.COMPLETED) {
String fName = jobExecution.getJobInstance().getJobParameters().getString("inputFile");
File f = new File (fName);
f.renameTo(new File(fName+".bak"));
System.out.println("job completed");
// Notifying when job successfully ends
} else if (jobExecution.getStatus() == BatchStatus.FAILED) {
// Notifying when job ends with failure
System.out.println("job fail");
}
}

@BeforeJob
public void beforeJob(JobExecution jobExecution) {
System.out.println("job start");
}
}




Tuesday, January 29, 2013

Maven Tips 2 - copy all dependent jars to a specified folder

1) use maven-dependency-plugin

             <plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<version>2.6</version>
<executions>
<execution>
<id>copy-dependencies</id>
<phase>package</phase>
<goals>
<goal>copy-dependencies</goal>
</goals>
<configuration>
<outputDirectory>target/dist/lib</outputDirectory>
<overWriteReleases>false</overWriteReleases>
<overWriteSnapshots>false</overWriteSnapshots>
<overWriteIfNewer>true</overWriteIfNewer>
</configuration>
</execution>
</executions>
</plugin>

2) If you are using m2e, you will get the following error when using the above plugin:

maven-dependency-plugin (goals "copy-dependencies", "unpack") is not supported by m2e.

To resolve it, add the following pluginManagement section within the <build> element:


<pluginManagement>
<plugins>
<plugin>
<groupId>org.eclipse.m2e</groupId>
<artifactId>lifecycle-mapping</artifactId>
<version>1.0.0</version>
<configuration>
<lifecycleMappingMetadata>
<pluginExecutions>
<pluginExecution>
<pluginExecutionFilter>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<versionRange>[2.0,)</versionRange>
<goals>
<goal>copy-dependencies</goal>
</goals>
</pluginExecutionFilter>
<action>
<execute />
</action>
</pluginExecution>
</pluginExecutions>
</lifecycleMappingMetadata>
</configuration>
</plugin>
</plugins>
</pluginManagement>

Maven3 install local 3rd party jar

1) install the third party jar to local repository


mvn install:install-file -Dfile={your local jar location}  -DgroupId=org.tanukisoftware -DartifactId=wrapper -Dversion=3.5.15 -Dpackaging=jar  -DgeneratePom=true

2) Add it as a dependency


<dependency>
<groupId>org.tanukisoftware</groupId>
<artifactId>wrapper</artifactId>
<version>3.5.15</version>
</dependency>

Take note: do not omit the -DgeneratePom=true argument. Without it, Maven will not generate a POM file, and when you later build your code, Maven will try to download the jar from the remote repository.

Monday, January 28, 2013

SpringMVC displays error message on the same page

This is an example jsp page:


<form:form action="dologin.do" commandName="hello" method="post" id="abc">
<fmt:message key="label.username" />
<form:input path="userName" />
<form:errors path="userName" />
<br />
<fmt:message key="label.password" />
<form:input path="password" />
<form:errors path="password" />
<br />
Date: <input type="text" id="datepicker" />
<br />
<input type="submit" value="submit it" />
<c:out value="${exception}"/>
</form:form>
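
The form binds to a command object named hello with userName and password fields; the bean itself is not shown in this post. A minimal sketch, assuming it only carries the two bound properties:

// Hypothetical command object bound by commandName="hello" in the JSP above
public class Hello {

    private String userName;
    private String password;

    public String getUserName() {
        return userName;
    }

    public void setUserName(String userName) {
        this.userName = userName;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }
}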



1) Show an error message for data validation


@RequestMapping(value = "/dologin.do", method = RequestMethod.POST)
public ModelAndView dologin(@Valid @ModelAttribute("hello") Hello hello,
BindingResult result,HttpSession session) throws Exception {

if (result.hasErrors()) {    // trigger to show validation error
return new ModelAndView("login");
}

try {
accountService.addAccount(hello.getUserName(), hello.getPassword());
} catch (Exception e) {
error(method,"Could not add account:"+hello,e);
throw e;    // remember to throw out exception
}
return new ModelAndView("hello");
}

===== validator =====


@Service
public class TestValidator implements Validator {

public boolean supports(Class<?> cls) {
return com.demo.web.bean.Hello.class.isAssignableFrom(cls);

}

public void validate(Object obj, Errors errors) {

Hello hello = (Hello) obj;
validateUserName(hello, errors);
validatePassword(hello, errors);
}

private void validateUserName(Hello hello, Errors errors) {

if (hello.getUserName() == null
|| hello.getUserName().trim().length() == 0) {

errors.rejectValue("userName", "username.empty");
// use else-if to avoid a NullPointerException when userName is null
} else if (hello.getUserName().equalsIgnoreCase("WX")) {

errors.rejectValue("userName", "username.me");
}
}

private void validatePassword(Hello hello, Errors errors) {
if (hello.getPassword() == null
|| hello.getPassword().trim().length() == 0) {

errors.rejectValue("password", "password.empty");
}
}
}
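
For @Valid to trigger the validator above, it has to be registered with the data binder. A minimal sketch, assuming the controller wires in TestValidator and registers it via @InitBinder; the controller name here is illustrative:

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.WebDataBinder;
import org.springframework.web.bind.annotation.InitBinder;

// Illustrative controller; in this post it would be the one handling /dologin.do
@Controller
public class LoginController {

    @Autowired
    private TestValidator testValidator;

    // Register the custom validator so @Valid on the "hello" model attribute runs it
    @InitBinder("hello")
    protected void initBinder(WebDataBinder binder) {
        binder.setValidator(testValidator);
    }

    // ... dologin() and the @ExceptionHandler method shown in this post ...
}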

2) Show an error message if processing fails

2.1) In your controller method, you are supposed to rethrow the exception


try {
accountService.addAccount(......);
} catch (Exception e) {
error(method,"Could not add account:"+hello,e);
throw e;
}
return new ModelAndView("hello");


2.2) Add one more method to handle the exception and supply the error message to be shown on the web page.

       @ExceptionHandler( Exception.class )
public ModelAndView handleException( Exception ex ){
ModelAndView mv = new ModelAndView("login","exception","cannot add account info.");
mv.addObject("hello", new Hello());
   return mv;
}


JQuery DataTables - add id attribute to each row




"fnRowCallback": function( nRow, aData, iDisplayIndex, iDisplayIndexFull) {
   $(nRow).attr("id",aData.userid);
   return nRow;
 }

This is example data returned from the server:

[{"userid":"wx","password":"wx1","timeStamp":1358999302000,"checked":1},{"userid":"u7","password":"p7","timeStamp":1358912639000,"checked":1},{"userid":"u5","password":"p5","timeStamp":1358836704000,"checked":1},{"userid":"rr","password":"ee","timeStamp":1359343559000,"checked":1},{"userid":"ds","password":"a","timeStamp":1358911426000,"checked":1},{"userid":"bdsdc","password":"dbdsa","timeStamp":1359343572000,"checked":1},{"userid":"bdc","password":"dbdsa","timeStamp":1359343496000,"checked":1},{"userid":"bc","password":"dbdsa","timeStamp":1359342034000,"checked":1},{"userid":"a1","password":"a","timeStamp":1359012297000,"checked":0}]

This is the actual result of the table shown on the page:

<tr class="odd" id="a1"><td class=" sorting_1">a1</td><td class="">a</td><td class="">1359012297000</td><td class=""><input type="checkbox" name="aCheckBox" class="dt_checked" value="0"></td><td class="center"><a href="" class="editor_remove">Delete</a></td></tr>

JQuery DataTables update cells

1) On the client side,


var oTable2 = $('#example').dataTable().makeEditable({
sUpdateURL : "updateUser.do",
"aoColumns" : [ {}, null, null, null, null ]   // null means this column is not allowed to edit
});

2) On the server side,


@RequestMapping(value = "/updateUser.do", produces = "application/json")
public @ResponseBody
String updateUser(@RequestParam String value, @RequestParam String id) {
String method = "updateUser";
info(method, "recieved updating record:[value]=" + value + ",[id]="
+ id);
// update data in db here
// ...............
// if the update succeeds, return exactly the same value back
// return value;
// if it fails, return the error message directly
return "failed to update";
}

3) Actual result if the update fails

As for how to set an id on each row in the table, refer to http://wangxiangblog.blogspot.sg/2013/01/jquery-datatables-add-id-attribute-to.html

SpringMVC replicates sessions in cluster servers

Environment:
Refer to http://wangxiangblog.blogspot.sg/2012/08/tomcat-cluster.html to set up the clustered servers.

1) This is the workers.properties configuration for the load balancer


worker.list=loadbalancer,cluster1,cluster2
#========cluster1========
worker.cluster1.port=8889                    
worker.cluster1.host=localhost                
worker.cluster1.type=ajp13
worker.cluster1.lbfactor = 1                  
#========cluster2========
worker.cluster2.port=8899
worker.cluster2.host=localhost
worker.cluster2.type=ajp13
# The higher the value of the lbfactor for Tomcat instance, the more work the server will do, and vice versa
worker.cluster2.lbfactor =1                
#========controller======
worker.loadbalancer.type=lb                
worker.loadbalancer.balanced_workers=cluster1,cluster2
# keep requests belonging to the same session (which means the same user) forwarded to the same worker
worker.loadbalancer.sticky_session=true

2) Add the <distributable/> element in web.xml

3) Code in the controller


@RequestMapping(value="/Session1.htm")
public ModelAndView doit(@RequestParam("name") String name,HttpSession session){
SessionObj sessionObj = new SessionObj();
sessionObj.setName(name);
session.setAttribute("sessionObj", sessionObj);

ModelAndView mav = new ModelAndView("hello2");
mav.addObject(sessionObj);
return mav;
}


      @RequestMapping(value="/Session2.htm")
public ModelAndView doit(HttpSession session){
SessionObj sob=(SessionObj)session.getAttribute("sessionObj");

ModelAndView mav = new ModelAndView("hello3");
mav.addObject("sessionObj", sob);
return mav;
}
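
For the attribute to be replicated between nodes, the object stored in the session must be serializable. A minimal sketch of the SessionObj bean used above, which is not shown in this post; the field is assumed from the code:

import java.io.Serializable;

// Hypothetical session bean; stored objects must implement Serializable
// to be replicated across the cluster
public class SessionObj implements Serializable {

    private static final long serialVersionUID = 1L;

    private String name;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }
}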




Take note:
If sticky_session is set to false, the session will be lost in the cluster environment. So how do we resolve it?
A workaround is to add the <distributable/> element to \META-INF\web-fragment.xml in spring-web-3.2.0.RELEASE.jar.

-----------------------------
I tried to run a similar session test web app with Struts2 on the clustered servers to replicate the session, but it does not work if sticky_session=false.


Friday, January 25, 2013

Spring MVC + Hibernate + jQuery DataTables + Pagination

jQuery DataTables (http://www.datatables.net/) always fetches all the data from the server for its built-in pagination, which is not good for huge data sets. The better approach is to pass the current position and page size to the server and fetch the data from the database batch by batch.

So, referring to http://datatables.net/usage/server-side, there are some parameters we must pass from the client to the server.

1) on the client side,


oTBExample2 = $("#example2").dataTable({
"bProcesing" : true,
"bServerSide" : true,
"bLenthChange" : false,
"iDisplayLength" : 10,
"sAjaxSource" : "users2.do",
"aoColumns" : [
{
"sTitle" : "User ID",
"mData" : "userid"
},
{
"sTitle" : "Password",
"mData" : "password"
},
{
"sTitle" : "Date & Time",
"mData" : "timeStamp"
},
{
"mData" : "checked",
"fnRender" : function(obj) {
if (obj.aData.checked == "1")
return '<input type="checkbox" name="aCheckBox" class="dt_checked" checked value="'+obj.aData.checked+'\"/>';

return '<input type="checkbox" name="aCheckBox"  class="dt_checked" value="'+obj.aData.checked+'\"/>';
}
},
{
"mData" : null,
"sClass" : "center",
"sDefaultContent" : '<a href="" class="editor_remove" >Delete</a>'
} ],
"fnServerData" : function(sSource, aoData, fnCallback) {
$.ajax({
"dataType" : 'json',
"type" : "GET",
"url" : sSource,
"data" : aoData,
"success" : fnCallback
});
},
"sPaginationType" : "full_numbers"

});// dataTable



<table id="example2" cellpadding="0" cellspacing="0" border="0"
class="display datatable">
<thead>
<tr>
<th width="20%">userid</th>
<th width="20%">password</th>
<th width="20%">time</th>
<th width="10%">selected</th>
<th width="10%">Delete</th>
</tr>
</thead>
<tbody>
</tbody>
</table>


2) On the server side,



@RequestMapping(value = "/users2", produces = "application/json")
public @ResponseBody
String showUser(@RequestParam int iDisplayStart,
            @RequestParam int iDisplayLength, @RequestParam int sEcho) {
String method="showUser";
info(method,"para0---"+iDisplayStart);
info(method,"para1---"+iDisplayLength);
info(method,"para2---"+sEcho);
DataTablesTO<Account> dt = new DataTablesTO<Account>();

List<Account> accts = accountService.getAccounts(iDisplayStart,iDisplayLength);
List<Account> accts2 = accountService.getAccounts();
dt.setAaData(accts);  // this is the dataset returned to the client
dt.setiTotalDisplayRecords(accts2.size());  // the total number of records (after filtering) for DataTables to calculate page numbers and positions
dt.setiTotalRecords(accts2.size());   // the total number of records in the db for DataTables to calculate page numbers
dt.setsEcho(sEcho);

return toJson(dt);
}


private String toJson(DataTablesTO<?> dt){
ObjectMapper mapper = new ObjectMapper();
try {
return mapper.writeValueAsString(dt);
} catch (JsonProcessingException e) {
// TODO Auto-generated catch block
e.printStackTrace();
return null;
}
}

The Jackson jars are needed to support the JSON conversion:

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;


public class DataTablesTO<T> implements java.io.Serializable{

private static final long serialVersionUID = -8220588043068200705L;
private List<T> aaData;
private int sEcho;
private Integer iTotalRecords;
private Integer iTotalDisplayRecords;
...... getter and setter.......
}

3) hibernate query


public List<Account> getAccounts(int currPosition, int pageSize) {

return sessionFactory.getCurrentSession().createQuery("from Account").setMaxResults(pageSize).setFirstResult(currPosition).list();
}


The current position is the index of the first row to be shown on the current page of the table.

4) Actual result on the page

5) This is the request data sent with the AJAX call:


  1. sEcho:
    1
  2. iColumns:
    5
  3. sColumns:
  4. iDisplayStart:
    0
  5. iDisplayLength:
    10
  6. mDataProp_0:
    userid
  7. mDataProp_1:
    password
  8. mDataProp_2:
    timeStamp
  9. mDataProp_3:
    checked
  10. mDataProp_4:
  11. sSearch:
  12. bRegex:
    false
  13. sSearch_0:
  14. bRegex_0:
    false
  15. bSearchable_0:
    true
  16. sSearch_1:
  17. bRegex_1:
    false
  18. bSearchable_1:
    true
  19. sSearch_2:
  20. bRegex_2:
    false
  21. bSearchable_2:
    true
  22. sSearch_3:
  23. bRegex_3:
    false
  24. bSearchable_3:
    true
  25. sSearch_4:
  26. bRegex_4:
    false
  27. bSearchable_4:
    true
  28. iSortCol_0:
    0
  29. sSortDir_0:
    asc
  30. iSortingCols:
    1
  31. bSortable_0:
    true
  32. bSortable_1:
    true
  33. bSortable_2:
    true
  34. bSortable_3:
    true
  35. bSortable_4:
    true

6) Right now the server-side code does not support ordering by column. If we click the second column header to sort by it, let's check the request data:

  1. sEcho:
    7
  2. iColumns:
    5
  3. sColumns:
  4. iDisplayStart:
    0
  5. iDisplayLength:
    10
  6. mDataProp_0:
    userid
  7. mDataProp_1:
    password
  8. mDataProp_2:
    timeStamp
  9. mDataProp_3:
    checked
  10. mDataProp_4:
  11. sSearch:
  12. bRegex:
    false
  13. sSearch_0:
  14. bRegex_0:
    false
  15. bSearchable_0:
    true
  16. sSearch_1:
  17. bRegex_1:
    false
  18. bSearchable_1:
    true
  19. sSearch_2:
  20. bRegex_2:
    false
  21. bSearchable_2:
    true
  22. sSearch_3:
  23. bRegex_3:
    false
  24. bSearchable_3:
    true
  25. sSearch_4:
  26. bRegex_4:
    false
  27. bSearchable_4:
    true
  28. iSortCol_0:
    1 // means sorted by the second column
  29. sSortDir_0:
    asc // means order by asc, if click again, it will change to desc
  30. iSortingCols:
    1
  31. bSortable_0:
    true
  32. bSortable_1:
    true
  33. bSortable_2:
    true
  34. bSortable_3:
    true
  35. bSortable_4:
    true


So we also need to change the HQL as follows, substituting the sort parameters from the request:

from Account order by ${iSortCol_0}+1 ${sSortDir_0}
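
Below is a minimal sketch of how the DAO method could apply these sort parameters, assuming the controller passes iSortCol_0 and sSortDir_0 through, and mapping the column index onto a whitelist of property names instead of concatenating the raw request values into the HQL:

// Column order must match the "aoColumns" definition on the client side
private static final String[] SORTABLE_COLUMNS =
        { "userid", "password", "timeStamp", "checked" };

public List<Account> getAccounts(int currPosition, int pageSize,
        int sortColumn, String sortDir) {
    // fall back to the first column / ascending for anything unexpected
    String column = (sortColumn >= 0 && sortColumn < SORTABLE_COLUMNS.length)
            ? SORTABLE_COLUMNS[sortColumn] : "userid";
    String direction = "desc".equalsIgnoreCase(sortDir) ? "desc" : "asc";

    // sessionFactory is the injected Hibernate SessionFactory, as in the earlier snippet
    return sessionFactory.getCurrentSession()
            .createQuery("from Account order by " + column + " " + direction)
            .setFirstResult(currPosition)
            .setMaxResults(pageSize)
            .list();
}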