Spring Boot and Spring Data JPA integration

Nowadays Spring and JPA integration has become a piece of cake, thanks to Spring Boot and Spring Data.

I am going to set up a PostgreSQL server using Docker

#pull the image
docker pull postgres
#run the container
docker run --name postgreslocal -e POSTGRES_PASSWORD=postgres -d postgres
#get the ip
docker inspect --format '{{ .NetworkSettings.IPAddress }}' postgreslocal
#get the port
docker inspect --format '{{ .NetworkSettings.Ports }}' postgreslocal

Create the employee table

create schema spring_data_jpa_example;

create table spring_data_jpa_example.employee(
	id        SERIAL PRIMARY KEY,
	firstname TEXT   NOT NULL,
	lastname  TEXT   NOT NULL,
	email     TEXT   NOT NULL,
	age       INT    NOT NULL,
	salary    REAL,
	unique(email)
);

insert into spring_data_jpa_example.employee (firstname,lastname,email,age,salary) 
values ('Emmanouil','Gkatziouras','gkatzioura@gmail.com',18,3000.23);

Let’s begin with our Gradle build file

group 'com.gkatzioura'
version '1.0-SNAPSHOT'

apply plugin: 'java'

sourceCompatibility = 1.8

buildscript {
    repositories {
        mavenCentral()
    }
    dependencies {
        classpath("org.springframework.boot:spring-boot-gradle-plugin:1.3.3.RELEASE")
    }
}

apply plugin: 'idea'
apply plugin: 'spring-boot'

repositories {
    mavenCentral()
}

dependencies {
    compile("org.springframework.boot:spring-boot-starter-web") {
        exclude module: "spring-boot-starter-tomcat"
    }
    compile("org.postgresql:postgresql:9.4-1206-jdbc42")
    compile("org.springframework.boot:spring-boot-starter-jetty")
    compile("org.springframework.boot:spring-boot-starter-data-jpa:1.3.3.RELEASE")
    compile("com.mchange:c3p0:0.9.5.2")
    testCompile("junit:junit:4.11");
}

As you can see we added the c3p0 connection pool, spring-boot-starter-data-jpa (which pulls in Hibernate) and the PostgreSQL driver. That’s all we need.

The Application class

package com.gkatzioura.springdata.jpa;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.ApplicationContext;

/**
 * Created by gkatzioura on 6/2/16.
 */
@SpringBootApplication
public class Application {


    public static void main(String[] args) {

        ApplicationContext ctx = SpringApplication.run(Application.class, args);
    }
}

The DataSource configuration

package com.gkatzioura.springdata.jpa.config;

import com.mchange.v2.c3p0.ComboPooledDataSource;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import javax.sql.DataSource;

/**
 * Created by gkatzioura on 6/2/16.
 */
@Configuration
public class DataSourceConfig {

    @Bean
    public DataSource createDataSource() throws Exception {

        ComboPooledDataSource ds = new ComboPooledDataSource();
        //point the jdbc url to the container ip and port retrieved with docker inspect
        ds.setJdbcUrl("jdbc:postgresql://172.17.0.3:5432/postgres?user=postgres&password=postgres");
        ds.setDriverClass("org.postgresql.Driver");
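        //illustrative pool sizing, not part of the original example; c3p0 defaults apply otherwise
        ds.setMinPoolSize(3);
        ds.setMaxPoolSize(20);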

        return ds;
    }

}

Our entity for the employee table

package com.gkatzioura.springdata.jpa.persistence.entity;

import javax.persistence.*;

/**
 * Created by gkatzioura on 6/2/16.
 */
@Entity
@Table(name = "employee", schema="spring_data_jpa_example")
public class Employee {

    @Id
    @Column(name = "id")
    //the id column is a postgres SERIAL, so identity generation matches it
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id;

    @Column(name = "firstname")
    private String firstName;

    @Column(name = "lastname")
    private String lastname;

    @Column(name = "email")
    private String email;

    @Column(name = "age")
    private Integer age;

    //salary maps to a real column, so it needs a floating point type
    @Column(name = "salary")
    private Double salary;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getFirstName() {
        return firstName;
    }

    public void setFirstName(String firstName) {
        this.firstName = firstName;
    }

    public String getLastname() {
        return lastname;
    }

    public void setLastname(String lastname) {
        this.lastname = lastname;
    }

    public String getEmail() {
        return email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    public Integer getAge() {
        return age;
    }

    public void setAge(Integer age) {
        this.age = age;
    }

    public Double getSalary() {
        return salary;
    }

    public void setSalary(Double salary) {
        this.salary = salary;
    }
}

The repository that will help us access all employees

package com.gkatzioura.springdata.jpa.persistence.repository;

import com.gkatzioura.springdata.jpa.persistence.entity.Employee;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;

/**
 * Created by gkatzioura on 6/2/16.
 */
public interface EmployeeRepository extends JpaRepository<Employee,Long>{
}
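
Out of the box JpaRepository gives us findAll, save and delete. Spring Data can also derive queries from method names, so the repository could be extended with finders like the ones below (a sketch; these methods are not part of the original example):

package com.gkatzioura.springdata.jpa.persistence.repository;

import com.gkatzioura.springdata.jpa.persistence.entity.Employee;
import org.springframework.data.jpa.repository.JpaRepository;

import java.util.List;

public interface EmployeeRepository extends JpaRepository<Employee,Long>{

    //Spring Data generates the implementations from the method names
    Employee findByEmail(String email);

    List<Employee> findByAgeGreaterThan(Integer age);
}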

And a controller that will fetch all the data

package com.gkatzioura.springdata.jpa.controller;

import com.gkatzioura.springdata.jpa.persistence.entity.Employee;
import com.gkatzioura.springdata.jpa.persistence.repository.EmployeeRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import java.util.List;

/**
 * Created by gkatzioura on 6/2/16.
 */
@RestController
public class TestController {

    @Autowired
    private EmployeeRepository employeeRepository;

    @RequestMapping("/employee")
    public List<Employee> getTest() {

        return employeeRepository.findAll();
    }
}
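
Persisting employees through the same repository is just as terse. A sketch of a create endpoint (the controller below is illustrative and not part of the original source):

package com.gkatzioura.springdata.jpa.controller;

import com.gkatzioura.springdata.jpa.persistence.entity.Employee;
import com.gkatzioura.springdata.jpa.persistence.repository.EmployeeRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class EmployeeCreationController {

    @Autowired
    private EmployeeRepository employeeRepository;

    @RequestMapping(value = "/employee", method = RequestMethod.POST)
    public Employee addEmployee(@RequestBody Employee employee) {

        //save returns the persisted entity, now carrying its generated id
        return employeeRepository.save(employee);
    }
}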

Pretty convenient, considering the dependency and XML configuration overhead of the past.

You can find the source code on GitHub.

Systemd and Upstart Services

Most Linux servers that I use are either Debian-based or RedHat-based.

A common task is adding daemon services.

Suppose that we want to start a Tomcat application on startup.

First we shall install Tomcat

mkdir /opt/tomcat
groupadd tomcat
useradd -s /bin/false -g tomcat -d /opt/tomcat tomcat
wget http://apache.cc.uoc.gr/tomcat/tomcat-8/v8.0.33/bin/apache-tomcat-8.0.33.tar.gz
tar xvf apache-tomcat-8.0.33.tar.gz
mv apache-tomcat-8.0.33/* /opt/tomcat
rm -r apache-tomcat-8.0.33 apache-tomcat-8.0.33.tar.gz 
cd /opt/tomcat
chgrp -R tomcat conf
chmod g+rwx conf
chmod g+r conf/*
chown -R tomcat work/ temp/ logs/

In the case of systemd we should add a tomcat.service file in /etc/systemd/system.
The file /etc/systemd/system/tomcat.service shall contain

[Unit]
Description=Apache Tomcat Web Application Container
After=syslog.target network.target

[Service]
Type=forking

Environment=JAVA_HOME=/usr/java/default
Environment=CATALINA_PID=/opt/tomcat/temp/tomcat.pid
Environment=CATALINA_HOME=/opt/tomcat
Environment=CATALINA_BASE=/opt/tomcat
Environment='CATALINA_OPTS=-Xms512M -Xmx1024M -server -XX:+UseParallelGC'
Environment='JAVA_OPTS=-Duser.timezone=UTC -Djava.awt.headless=true -Djava.security.egd=file:/dev/./urandom'

ExecStart=/opt/tomcat/bin/startup.sh
ExecStop=/bin/kill -15 $MAINPID

User=tomcat
Group=tomcat

[Install]
WantedBy=multi-user.target

I specified the service to start after the syslog and network targets are up.
Since the startup script forks, systemd manages Tomcat as a forking daemon and stops it by sending SIGTERM (kill -15) to the main PID.
With User and Group we specify the user and the group that the process should run as.

To enable and start the service you have to issue

systemctl enable tomcat
systemctl start tomcat

In the case of Upstart we should create a tomcat.conf file in /etc/init/.
The content of /etc/init/tomcat.conf

description     "Tomcat instance"
author          "Emmanouil Gkatziouras"

respawn
respawn limit 2 5

start on runlevel [2345]
stop on runlevel [!2345]

setuid tomcat
setgid tomcat

env CATALINA_HOME=/opt/tomcat

script
        $CATALINA_HOME/bin/catalina.sh run
end script

post-stop script
        rm -rf $CATALINA_HOME/temp/*
end script

It will start on run levels 2, 3, 4 or 5.
The process will run with the tomcat user and group ids.
After Tomcat is stopped, the post-stop script block will remove the temp files.
Instead of starting the process in the background as a daemon, Upstart will manage the process in the foreground.

To start just issue

sudo initctl start tomcat

Async for Node.js

The async module for Node.js saves the day when it comes to synchronizing asynchronous tasks, or executing them in a serial manner.

To execute tasks in order, you can use the series method.


var async = require('async');

var somethingAsynchronous = function(callback) {
    console.log('Called something asynchronous');
    callback(null, 'first result');
};

var somethingElseAsynchronous = function(callback) {
    console.log('Called something else asynchronous');
    callback(null, 'second result');
};

async.series([
  function(callback) {
    somethingAsynchronous(function(err, result) {
      if (err) {
        callback(err);
      } else {
        callback(null, result);
      }
    });
  },
  function(callback) {
    somethingElseAsynchronous(function(err, result) {
      if (err) {
        callback(err);
      } else {
        callback(null, result);
      }
    });
  }
], function(err, results) {
  // results holds the result of each task in order
});

To execute tasks in order and use the results of previous tasks, you can use the waterfall method.
The last function specified handles the result of the executed tasks. If an error occurs before all the specified tasks have executed, the remaining tasks will not execute and the last function will handle the error.


var somethingAsynchronous = function(callback) { 
    callback(null,'This is a result'); 
};

var somethingElseAsynchronous = function(firstResult,callback) { 
   callback(null,firstResult+" and this is appended");
};

async.waterfall([
  function (callback){
    somethingAsynchronous(callback);
  },
  function(result,callback) {
    somethingElseAsynchronous(result,callback);
  }
],
function(err,result) {
  console.log('The end result is: '+result);
});

The method parallel is used to execute tasks in parallel and synchronize them after their execution.


var somethingAsynchronous = function(callback) { 
    
    /*
        Asynchronous code
    */
    
    callback(null,'23'); 
};

var somethingElseAsynchronous = function(callback) { 

    /*
        Asynchronous code
    */
    
    callback(null,'sad');
};

async.parallel([
somethingAsynchronous,
somethingElseAsynchronous
],function(err,result){
  console.log('The result is '+result);
});

If we have an array of items that need to be processed in an asynchronous manner, we can use the map method.


var items = ['a','b','c','d'];

async.map(items,function(item,callback) {

   callback(null,'Did something asynchronous with '+item);
},
function(err,results){

  results.forEach(function(result) {
      console.log(result);
  });
});

Integrate MongoDB to your Spring project

This article shows how to integrate MongoDB into your Spring project through annotation-based configuration.

We will begin with our Gradle configuration.

group 'com.gkatzioura.spring'
version '1.0-SNAPSHOT'


buildscript {
    repositories {
        mavenCentral()
    }
    dependencies {
        classpath("org.springframework.boot:spring-boot-gradle-plugin:1.2.7.RELEASE")
    }
}

apply plugin: 'java'
apply plugin: 'eclipse'
apply plugin: 'idea'
apply plugin: 'spring-boot'

jar {
    baseName = 'mdb-spring-boot'
    version =  '0.1.0'
}

repositories {
    mavenCentral()
}

sourceCompatibility = 1.8
targetCompatibility = 1.8

dependencies {


    compile("org.springframework.boot:spring-boot-starter-web")


    compile('com.googlecode.json-simple:json-simple:1.1.1')
    compile("org.springframework.boot:spring-boot-starter-actuator")
    compile("org.springframework.data:spring-data-mongodb:1.8.0.RELEASE")
    compile("ch.qos.logback:logback-classic:1.1.3")
    compile("ch.qos.logback:logback-core:1.1.3")
    compile("org.json:json:20150729")
    compile("com.google.code.gson:gson:2.4")

    compile("org.slf4j:slf4j-api:1.7.12")

    testCompile("junit:junit")
    testCompile('org.springframework.boot:spring-boot-starter-test')
}

task wrapper(type: Wrapper) {
    gradleVersion = '2.3'
}

We will proceed with the MongoDB configuration using spring annotations.

package com.gkatzioura.spring.config;

import com.mongodb.MongoClient;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;

import java.net.UnknownHostException;

/**
 * Created by oSeven3 on 21/10/2015.
 */
@Configuration
public class MongoDbConfiguration {

    public @Bean MongoDbFactory getMongoDbFactory() throws UnknownHostException {
        return new SimpleMongoDbFactory(new MongoClient("localhost",27017),"mongotest");
    }

    public @Bean(name = "mongoTemplate") MongoTemplate getMongoTemplate() throws UnknownHostException {

        MongoTemplate mongoTemplate = new MongoTemplate(getMongoDbFactory());
        return mongoTemplate;
    }

}

Next we will define our model.
We shall create the Location model, which will contain the latitude, the longitude and a timestamp.

package com.gkatzioura.spring.persistence.entities;

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;

import java.math.BigDecimal;
import java.util.Date;
import java.util.UUID;

@Document(collection = "location")
public class Location {

    @Id
    private String id;
    private BigDecimal latitude;
    private BigDecimal longitude;
    private Date timestamp;

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public BigDecimal getLatitude() {
        return latitude;
    }

    public void setLatitude(BigDecimal latitude) {
        this.latitude = latitude;
    }

    public BigDecimal getLongitude() {
        return longitude;
    }

    public void setLongitude(BigDecimal longitude) {
        this.longitude = longitude;
    }

    public Date getTimestamp() {
        return timestamp;
    }

    public void setTimestamp(Date timestamp) {
        this.timestamp = timestamp;
    }
}
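
As a side note, the MongoTemplate bean we configured can also be used directly for ad-hoc operations. A minimal sketch follows; the service class and its package are illustrative and not part of the original project:

package com.gkatzioura.spring.service;

import com.gkatzioura.spring.persistence.entities.Location;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.stereotype.Service;

import java.math.BigDecimal;
import java.util.Date;
import java.util.List;

@Service
public class LocationSampleService {

    @Autowired
    private MongoTemplate mongoTemplate;

    public List<Location> saveSampleAndFetchAll() {

        Location location = new Location();
        location.setLatitude(new BigDecimal("37.97"));
        location.setLongitude(new BigDecimal("23.72"));
        location.setTimestamp(new Date());

        //persist directly through the template, then read everything back
        mongoTemplate.save(location);
        return mongoTemplate.findAll(Location.class);
    }
}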

Then we shall create our repository

package com.gkatzioura.spring.persistence.repositories;

import com.gkatzioura.spring.persistence.entities.Location;
import org.springframework.data.repository.CrudRepository;

import java.util.UUID;

public interface LocationRepository extends CrudRepository<Location,String> {
}
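
As with the JPA example, Spring Data MongoDB can derive queries from method names, so the repository could be extended with a finder like the one below (a sketch; the method is not part of the original example):

package com.gkatzioura.spring.persistence.repositories;

import com.gkatzioura.spring.persistence.entities.Location;
import org.springframework.data.repository.CrudRepository;

import java.util.Date;
import java.util.List;

public interface LocationRepository extends CrudRepository<Location,String> {

    //Spring Data MongoDB derives the query from the method name
    List<Location> findByTimestampGreaterThan(Date timestamp);
}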

Then we shall define our controller

package com.gkatzioura.spring.controller;

import com.gkatzioura.spring.persistence.entities.Location;
import com.gkatzioura.spring.persistence.repositories.LocationRepository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;

import java.util.ArrayList;
import java.util.List;

import java.io.IOException;

/**
 * Created by oSeven3 on 21/10/2015.
 */

@RestController
@RequestMapping("/location")
public class LocationController {

    @Autowired
    private LocationRepository locationRepository;

    private static final Logger LOGGER = LoggerFactory.getLogger(LocationController.class);

    @RequestMapping(value = "/",method = RequestMethod.POST)
    @ResponseBody
    public String post(@RequestBody Location location) {

        locationRepository.save(location);

        return "OK";
    }

    @RequestMapping(value = "/",method = RequestMethod.GET)
    @ResponseBody
    public List<Location> get() {

        List<Location> locations = new ArrayList<>();
        locationRepository.findAll().forEach(locations::add);
        return locations;
    }

}

Last but not least our Application class

package com.gkatzioura.spring;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

/**
 * Created by gkatziourasemmanouil on 8/15/15.
 */
@SpringBootApplication
public class Application {

    public static void main(String[] args) {
        SpringApplication.run(Application.class, args);
    }

}

In order to run the application just issue

gradle bootRun

Why I use Node.js

It has been a while since I took up Node.js development.
My early impressions were pretty positive and after some months of Node.js development I have to say that I am amazed.

There are many reasons to continue using Node.js for my projects.

Great for applications with heavy I/O

The asynchronous nature of Node.js enables you to stay focused on your implementation. You don’t need any extra configuration, as you do with multithreaded environments, and long I/O operations don’t have to be dispatched to any custom mechanism, which spares you extra development cost.
Provided your application is mostly I/O-bound rather than computation-bound, chances are that Node.js will work for you.

Bootstrapping

Node.js offers one of the smoothest bootstrapping experiences I have had with a programming environment. All you need is to have node and npm installed. There are libraries for almost everything you need and the configuration required is minimal.
Getting started with the implementation of your Node.js project takes no time at all.

Set up Simplicity

All it takes to set up your project is your source code and a package.json file with your dependencies.

Make use of your JavaScript skills

Although I am a backend developer, I have had to write some JavaScript in the past. The same applies to other developers I know, even the most backend-focused ones. Learning a language is an investment. You can get more out of your JavaScript knowledge by using Node.js on your projects, provided it suits their needs.

Not another web framework

Node.js is not just another web application framework. Due to its asynchronous nature and efficiency it can be applied to many problems; for example, it can be used as glue among the components of your infrastructure. Also, thanks to its heavy development activity, you don’t just have a runtime environment, you have a whole ecosystem with tools that apply to a wide variety of problems.

Conclusion

Node.js is already part of the tools that I use on a daily basis. However, it should be used wisely, and you should make sure that it fits your project’s nature.
Dealing with callback hell is really challenging, but in exchange you get a pretty promising and fast-growing ecosystem.

Database migration with Flyway

Flyway is an extremely convenient database migration tool.
First and foremost it integrates wonderfully with Maven.
But one of its biggest assets is the ability to run both SQL migration scripts and Java migration scripts.

Let us start with a simple Maven project

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.gkatzioura</groupId>
    <artifactId>migration</artifactId>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <flyway.version>3.1</flyway.version>
        <mysql.driver.version>5.1.33</mysql.driver.version>
        <database.url>{your jdbc url}</database.url>
        <database.user>{your database user}</database.user>
        <database.password>{your database password}</database.password>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.flywaydb</groupId>
            <artifactId>flyway-core</artifactId>
            <version>${flyway.version}</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.flywaydb</groupId>
                <artifactId>flyway-maven-plugin</artifactId>
                <version>${flyway.version}</version>
                <configuration>
                    <baselineOnMigrate>true</baselineOnMigrate>
                    <url>${database.url}</url>
                    <user>${database.user}</user>
                    <password>${database.password}</password>
                </configuration>
                <dependencies>
                    <dependency>
                        <groupId>mysql</groupId>
                        <artifactId>mysql-connector-java</artifactId>
                        <version>${mysql.driver.version}</version>
                    </dependency>
                </dependencies>
            </plugin>
        </plugins>
    </build>
</project>

When you issue the following through Maven

mvn flyway:migrate

Flyway will then look in the db/migration folder on your classpath to find any migration files. This location can be changed by altering the locations element inside the configuration of the Flyway plugin.

For my first migration file I will use SQL.

The SQL file will be located in the db/migration folder, which resides in the Maven resources folder.
The name will be V1_1__Create_Persons.sql.
Therefore the full path will be src/main/resources/db/migration/V1_1__Create_Persons.sql

CREATE TABLE Persons
(
PersonID bigint(20) NOT NULL AUTO_INCREMENT,
LastName varchar(255),
FirstName varchar(255),
PRIMARY KEY (PersonID)
);

The second migration will be a Java file.
It will be located in the db.migration package, which resides in the Maven source folder.
The name will be V1_2__Insert_Persons.java.
Therefore the full path will be src/main/java/db/migration/V1_2__Insert_Persons.java

package db.migration;

import org.flywaydb.core.api.migration.jdbc.JdbcMigration;

import java.sql.Connection;
import java.sql.Statement;

public class V1_2__Insert_Persons implements JdbcMigration
{

	@Override
	public void migrate(Connection connection) throws Exception
	{
		Statement stmt = connection.createStatement();

		stmt.addBatch("INSERT INTO Persons (FirstName,LastName) VALUES ('Emmanouil','Gkatziouras')");

		stmt.addBatch("INSERT INTO Persons (FirstName,LastName) VALUES ('Do not know','this guy')");

		try {
			stmt.executeBatch();
		}
		finally {
			stmt.close();
		}

	}

}

Since we provide a Java migration, it is wise to invoke Flyway with

mvn clean compile flyway:migrate
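
Flyway can also be invoked programmatically through its Java API, which is handy when migrations should run on application startup. A minimal sketch, assuming flyway-core 3.x and the MySQL driver on the classpath; the class, the JDBC url and the credentials below are placeholders:

package com.gkatzioura;

import org.flywaydb.core.Flyway;

public class MigrationRunner {

    public static void main(String[] args) {

        Flyway flyway = new Flyway();

        //placeholders: use your own jdbc url, user and password
        flyway.setDataSource("jdbc:mysql://localhost:3306/yourdb", "youruser", "yourpassword");

        //sql and compiled java migrations are picked up from the default db/migration location
        flyway.migrate();
    }
}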

Aspect Oriented Programming: Qi4j

Qi4j is an emerging framework for composition, injection and aspect oriented programming.

Its main asset is composition; however, it provides us with tools such as concerns and side effects in order to apply aspect oriented programming.

I will use the same employee example as I did in the previous posts on Spring and Java EE.
Each employee, whether a supervisor or a worker, will exhibit different behavior before entering the work area.

First let us start with the interface

 
package com.gkatzioura;

public interface Employee {

    public void enterWorkArea();

}

Also I will create the default implementation for the Employee interface

 
package com.gkatzioura;

import java.util.logging.Logger;

public class EmployeeMixin implements Employee {

    private static final Logger LOGGER = Logger.getLogger(EmployeeMixin.class.getName());

    @Override
    public void enterWorkArea() {

        LOGGER.info("I will enter the work area");

    }

}

Now I will create two transient composites a worker and an employee.

 
package com.gkatzioura;

import org.qi4j.api.composite.TransientComposite;

public interface Worker extends Employee,TransientComposite {
}

 
package com.gkatzioura;

import org.qi4j.api.composite.TransientComposite;

public interface Supervisor extends Employee,TransientComposite {
}

And for each composite I will create a concern. A concern is the equivalent of MethodBeforeAdvice in Spring.

 
package com.gkatzioura;

import org.qi4j.api.concern.ConcernOf;
import java.util.logging.Logger;

public class WorkerBeforeConcern extends ConcernOf<Employee> implements Employee{

    private static final Logger LOGGER = Logger.getLogger(WorkerBeforeConcern.class.getName());

    @Override
    public void enterWorkArea() {

        LOGGER.info("Informing the supervisor that I have arrived");

        next.enterWorkArea();
    }
}
 
package com.gkatzioura;

import org.qi4j.api.concern.ConcernOf;
import java.util.logging.Logger;

public class SupervisorBeforeConcern extends ConcernOf<Employee> implements Employee {

    private static final Logger LOGGER = Logger.getLogger(SupervisorBeforeConcern.class.getName());

    @Override
    public void enterWorkArea() {

        LOGGER.info("Check if everything is ok");

        next.enterWorkArea();
    }
}

The last class that remains is the one that assembles our application.

 
package com.gkatzioura;

import org.qi4j.api.activation.ActivationException;
import org.qi4j.api.structure.Application;
import org.qi4j.api.structure.Module;
import org.qi4j.bootstrap.*;
import java.util.logging.Logger;

public class Main {

    private static Energy4Java qi4j;
    private static Application application;

    private static final Logger LOGGER = Logger.getLogger(Main.class.getName());

    public static void main(String[] args) throws AssemblyException, ActivationException {

        qi4j = new Energy4Java();
        application = qi4j.newApplication(new ApplicationAssembler() {

            @Override
            public ApplicationAssembly assemble(ApplicationAssemblyFactory factory)
                    throws AssemblyException {

                ApplicationAssembly assembly = factory.newApplicationAssembly();

                LayerAssembly rootLayer = assembly.layer("layer");
                ModuleAssembly rootModule = rootLayer.module("module");

                rootModule.transients(Supervisor.class)
                        .withMixins(EmployeeMixin.class)
                        .withConcerns(SupervisorBeforeConcern.class);
                rootModule.transients(Worker.class)
                        .withMixins(EmployeeMixin.class).
                        withConcerns(WorkerBeforeConcern.class);

                return assembly;
            }
        });

        application.activate();

        Module module = application.findModule("layer", "module");

        Employee supervisor = module.newTransient(Supervisor.class);
        Worker worker = module.newTransient(Worker.class);

        supervisor.enterWorkArea();

        worker.enterWorkArea();

    }    
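
When the Main class runs, each call goes through the corresponding concern before reaching the mixin: the supervisor should log "Check if everything is ok" followed by "I will enter the work area", while the worker should log "Informing the supervisor that I have arrived" followed again by the mixin's message, since each concern delegates to the underlying mixin through next.enterWorkArea().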
}