Pages

Thursday, April 30, 2020

Time logging in PL/SQL procedures

I need to monitor how long the execution of a PL/SQL procedure takes. The procedure name together with its execution start and end times is saved in the VT_UPDATE_LOG table. For example, a procedure VT_UPDATE sequentially invokes four other procedures: VT_SOA_UPDATE, VT_MOLIS_UPDATE, VT_UPDATE_SARS and VT_UPDATE_V_OPA_DEMOG. The time log includes the names of the main and nested procedures:

To each monitored procedure I added calls to auxiliary procedures - VT_LOG_START at the beginning and VT_LOG_END before the end:

create or replace procedure VT_update_sars as
...
begin
VT_LOG_START;
...
commit;
VT_LOG_end;
end;
/

VT_LOG_START and VT_LOG_END execute in their own autonomous transactions:

create or replace PROCEDURE VT_LOG_START
IS
  PRAGMA AUTONOMOUS_TRANSACTION;
  name varchar2(30);
BEGIN
  -- name of the calling procedure (dynamic depth 2 in the call stack)
  name := UTL_CALL_STACK.SUBPROGRAM(2)(1);
  insert into VT_UPDATE_LOG (PROCEDURE_NAME)
  values (name);
  commit;
END;

create or replace PROCEDURE VT_LOG_END
IS
  PRAGMA AUTONOMOUS_TRANSACTION;
  name varchar2(30);
BEGIN
  name := UTL_CALL_STACK.SUBPROGRAM(2)(1);
  -- close the most recent open log entry of the calling procedure
  update VT_UPDATE_LOG set END_DATE = sysdate
  where PROCEDURE_NAME = name and END_DATE is null
    and START_DATE = (select max(START_DATE) from VT_UPDATE_LOG
                      where PROCEDURE_NAME = name and END_DATE is null);
  commit;
END;

Monday, February 4, 2019

Download an Excel file after clicking a button posting JSON

My applications display data stored in a database as a nice table. After filtering and highlighting the displayed data, the users want to download selected columns of the table as an Excel file. I use a very simple solution. Upon clicking a button, the visible contents of the table are posted via ajax to the backend as a two-dimensional array in JSON format. When the ajax response is received, a second ordinary HTTP GET request is made to download the Excel file generated with Apache POI from the posted data.

My RESTEasy-based backend REST resource class:

@Path("/excel")
@Produces(APPLICATION_JSON_UTF8)
public class ExcelResource {

    @Context
    HttpServletRequest req;
    static String DATA_SESSION_ATTRIBUTE = "DATA_SESSION_ATTRIBUTE";

    @POST
    @Consumes(MediaType.APPLICATION_JSON)
    public void postData(List<List<String>> rows) throws SQLException, IOException, ParseException {
        req.getSession().setAttribute(DATA_SESSION_ATTRIBUTE, rows);
    }

    @GET
    @Produces(MediaType.APPLICATION_OCTET_STREAM)
    public Response getExcel() throws SQLException, IOException, ParseException {
        List<List<String>> rows = (List<List<String>>) req.getSession().getAttribute(DATA_SESSION_ATTRIBUTE);
        return Response.ok(new Excel().convert(rows))
                .header("content-disposition", "attachment; filename = export.xlsx").build();
    }
}

The REST resource uses the auxiliary class Excel. For the sake of simplicity, I show a short class producing Excel files with plain text and without any colors:

public class Excel {

    public StreamingOutput convert(List<List<String>> rows) throws FileNotFoundException, IOException, SQLException {
        return new StreamingOutput() {
            public void write(OutputStream out) throws IOException {
                Workbook wb = new XSSFWorkbook();

                Sheet sheet = wb.createSheet("Data");
                for (int r = 0; r < rows.size(); r++) {
                    List<String> sourceRow = rows.get(r);
                    Row row = sheet.createRow(r);
                    for (int c = 0; c < sourceRow.size(); c++) {
                        Cell cell = row.createCell(c);
                        cell.setCellValue(sourceRow.get(c));
                    }
                }

                wb.write(out);
            }
        };
    }
}
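
The class above writes plain text only. If some styling is needed, POI cell styles can be applied. A minimal sketch (assuming a reasonably recent POI version) that makes the first, header row bold; it could be called inside write() after the rows are created:

    void styleHeaderRow(Workbook wb, Sheet sheet) {
        // make the first (header) row bold
        Font bold = wb.createFont();
        bold.setBold(true);
        CellStyle headerStyle = wb.createCellStyle();
        headerStyle.setFont(bold);
        for (Cell cell : sheet.getRow(0)) {
            cell.setCellStyle(headerStyle);
        }
    }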

The JavaScript part is just as simple. The value of each td cell is wrapped in a div element (because it is easier to manipulate the div's CSS properties, such as dimensions, in response to user actions). Here is a method converting the selected columns of the HTML table into a two-dimensional array. The jQuery html() method can be used instead of text() to retain the user-generated tags (e.g. an HTML bold tag marking search string matches).

function getSelectedColumns() {
    var $headers = $resultTable.find('thead th').filter('.' + constants.SELECTED_HEADER);
    if (!$headers.length) // nothing is selected
        $headers = $resultTable.find('thead th');
    var columnIndexes = [];
    var rows = [];
    var row = [];
    for (var i = 0; i < $headers.length; i++) {
        var $header = $headers.eq(i);
        console.log($header.text() + "; " + $header.index());
        columnIndexes.push($header.index());
        row.push($header.text());
    }
    rows.push(row);
    var $tableRows = $resultTable.find('tbody tr');
    for (var i = 0; i < $tableRows.length; i++) {
        var row = [];
        var $rowTds = $tableRows.eq(i).find('td div');
        for (var c = 0; c < columnIndexes.length; c++) {
            var $div = $rowTds.eq(columnIndexes[c]);
            row.push($div.text());
        }
        rows.push(row);
    }
    excel.send(rows);
}

The array is passed to a short require.js module, Excel, that posts the JSON and, upon receiving the response, triggers the download of the generated file by changing window.location:

define(['jquery'],
    function ($) {
        return function () {
            this.send = function (params) {
                $.ajax({
                    method: "POST",
                    url: "api/excel",
                    data: JSON.stringify(params),
                    processData: false,
                    contentType: 'application/json'
                }).done(onLoaded);
            };

            function onLoaded() {
                window.location = "api/excel";
            }
        };
    });

Friday, September 21, 2018

SQL query or PL/SQL function to count business days between two dates

The weekends and holidays should be excluded. It is easy to exclude weekends, but the holidays are specific to each place and should be stored in an additional table. I store the holidays in a table HOLIDAYS_VD:

Select the time interval between two dates

The SQL query is adapted from here.

 select count(*) from (
--  select * from (
 select * from (
 select  dayinbetween, to_char( dayinbetween, 'DY' ) weekday from (
 select   startday+level-1 dayinbetween from (
 select startday ,endday-startday diffdays from (
 select   to_date('01.02.2000','DD.MM.YYYY') startday, to_date('29.02.2000','DD.MM.YYYY') endday from dual
 )
 ) connect by level <= diffdays+1
 )
 )where weekday not in ( 'SAT', 'SUN' )--all non-weekend days in between
 ) a
 left join  holidays_vd h on a.dayinbetween=h.day
 where h.day is null
 order by dayinbetween; 

It is almost right, but it does not return 0 if the two dates are the same. I did not correct it and abandoned it because I needed a PL/SQL function. The function below is based on the query above and seems to work fine.

PL/SQL function subtracting dates and excluding weekends and holidays
create or replace PACKAGE indicators AS
  TYPE DateListType IS TABLE OF DATE;
  function subtract_business_dates(startdate DATE, enddate DATE) return integer;
END;

create or replace PACKAGE BODY indicators AS
  function subtract_business_dates(
    startdate DATE,
    enddate   DATE) return integer
  IS
    result          INTEGER := 1;
    dateList        DateListType := DateListType();
    difference_days INTEGER;
  BEGIN
    difference_days := trunc(enddate) - trunc(startdate);
    if difference_days < 0 then
      RAISE_APPLICATION_ERROR(-20000, 'The first date is after the second date');
    end if;
    if difference_days = 0 then
      return 0;
    end if;

    -- fill the list with every day after startdate up to and including enddate
    dateList.extend(difference_days);
    FOR i IN dateList.first..dateList.last
    LOOP
      dateList(i) := startdate + i;
    END LOOP;

    -- count the days that are neither weekends nor holidays
    select count(*) into result from (
      select * from (
        select dayinbetween, to_char(dayinbetween, 'DY') weekday from (
          select COLUMN_VALUE dayinbetween from table(dateList)
        )
      ) where weekday not in ('SAT', 'SUN') -- all non-weekend days in between
    ) a
    left join holidays_vd h on a.dayinbetween = h.day
    where h.day is null;

    return result;
  END;
END;

Some tests to make sure that the function works well. Note that the holidays table includes a test date, 14.02.2000.

select indicators.subtract_business_dates(to_date('01.02.2000','DD.MM.YYYY'),to_date('29.02.2000','DD.MM.YYYY')) from dual;
select indicators.subtract_business_dates(to_date('04.02.2000','DD.MM.YYYY'),to_date('01.02.2000','DD.MM.YYYY')) from dual; --exception

select indicators.subtract_business_dates(to_date('03.02.2000','DD.MM.YYYY'),to_date('03.02.2000','DD.MM.YYYY')) from dual;--thu
select indicators.subtract_business_dates(to_date('03.02.2000','DD.MM.YYYY'),to_date('04.02.2000','DD.MM.YYYY')) from dual;--fri
select indicators.subtract_business_dates(to_date('03.02.2000','DD.MM.YYYY'),to_date('05.02.2000','DD.MM.YYYY')) from dual;--sat
select indicators.subtract_business_dates(to_date('03.02.2000','DD.MM.YYYY'),to_date('06.02.2000','DD.MM.YYYY')) from dual;--sun
select indicators.subtract_business_dates(to_date('03.02.2000','DD.MM.YYYY'),to_date('07.02.2000','DD.MM.YYYY')) from dual;--mon
select indicators.subtract_business_dates(to_date('03.02.2000','DD.MM.YYYY'),to_date('08.02.2000','DD.MM.YYYY')) from dual;--tue


select indicators.subtract_business_dates(to_date('04.02.2000','DD.MM.YYYY'),to_date('04.02.2000','DD.MM.YYYY')) from dual;--fri
select indicators.subtract_business_dates(to_date('04.02.2000','DD.MM.YYYY'),to_date('05.02.2000','DD.MM.YYYY')) from dual;--sat
select indicators.subtract_business_dates(to_date('04.02.2000','DD.MM.YYYY'),to_date('06.02.2000','DD.MM.YYYY')) from dual;--sun
select indicators.subtract_business_dates(to_date('04.02.2000','DD.MM.YYYY'),to_date('07.02.2000','DD.MM.YYYY')) from dual;--mon
select indicators.subtract_business_dates(to_date('04.02.2000','DD.MM.YYYY'),to_date('08.02.2000','DD.MM.YYYY')) from dual;--tue

select indicators.subtract_business_dates(to_date('10.02.2000','DD.MM.YYYY'),to_date('10.02.2000','DD.MM.YYYY')) from dual;--thu
select indicators.subtract_business_dates(to_date('10.02.2000','DD.MM.YYYY'),to_date('11.02.2000','DD.MM.YYYY')) from dual;--fri
select indicators.subtract_business_dates(to_date('10.02.2000','DD.MM.YYYY'),to_date('12.02.2000','DD.MM.YYYY')) from dual;--sat
select indicators.subtract_business_dates(to_date('10.02.2000','DD.MM.YYYY'),to_date('13.02.2000','DD.MM.YYYY')) from dual;--sun
select indicators.subtract_business_dates(to_date('10.02.2000','DD.MM.YYYY'),to_date('14.02.2000','DD.MM.YYYY')) from dual;--mon holiday
select indicators.subtract_business_dates(to_date('10.02.2000','DD.MM.YYYY'),to_date('15.02.2000','DD.MM.YYYY')) from dual;--tue
select indicators.subtract_business_dates(to_date('10.02.2000','DD.MM.YYYY'),to_date('16.02.2000','DD.MM.YYYY')) from dual;--wed

Thursday, May 24, 2018

Copying data from Oracle to MySQL or from any to any database

There is no official tool to export data from Oracle to a MySQL database. I recurrently need to copy a huge table from a source Oracle database to MySQL. It seems to me that configuring any of the available tools is more complicated than just using a very simple and therefore reliable Java class. The class simply executes, in the target database, an INSERT statement for each row of the SELECT query executed in the source database. I run such a tiny application as a background job to copy millions of rows from one VM to another overnight.

The prerequisite is that the target table exists in the target database. But DDL is the easiest part of an export.

public class Main {

    static String SOURCE_TABLE_NAME = "ONM_MOL_DATA", TARGET_TABLE_NAME = "ONM_MOL_DATA_COPY";
    String SQL = "select * from " + SOURCE_TABLE_NAME; 
    String INSERT = "insert into " + TARGET_TABLE_NAME + " values()";
    String DELETE = "delete from " + TARGET_TABLE_NAME;

    void run() throws SQLException, IOException {

        try (Connection oracleCon = ConnectionFactory.getOracleConnection();
                Connection mySQLCon = ConnectionFactory.getMySQLConnection();
                Statement oracleStmt = oracleCon.createStatement();
                ResultSet oracleRs = oracleStmt.executeQuery(SQL)) {
            ResultSetMetaData md = oracleRs.getMetaData();

            String mySQLInsert = getInsert(md.getColumnCount());

            // clean
            Statement mySQLDeleteStmt = mySQLCon.createStatement();
            mySQLDeleteStmt.executeUpdate(DELETE);

            // copy
            mySQLCon.setAutoCommit(false);
            System.out.println(mySQLInsert);
            PreparedStatement mySQLInsertStmt = mySQLCon.prepareStatement(mySQLInsert);

            int row = 0;
            while (oracleRs.next()) {
                for (int c = 1; c <= md.getColumnCount(); c++) {
                    mySQLInsertStmt.setObject(c, oracleRs.getObject(c));
                }
                mySQLInsertStmt.executeUpdate();
                row++;
                if (row % 100000 == 0) {
                    System.out.println("row=" + row);
                }
            }
            mySQLCon.commit();
        }

    }

    String getQuestionMarks(int count) {
        String[] ar = new String[count];
        Arrays.fill(ar, "?");
        return String.join(",", ar);
    }

    String getInsert(int count) {
        return INSERT.replace("values()", "values(" + getQuestionMarks(count) + ")");

    }

    public static void main(String... args) throws SQLException, IOException {
        new Main().run();
    }
}
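
If the copy is too slow, JDBC batching reduces the number of round trips. Below is a sketch of the same copy loop using addBatch/executeBatch; the batch size of 1000 is arbitrary, and with MySQL Connector/J the batching only really pays off when rewriteBatchedStatements=true is appended to the connection URL.

            // a possible variant of the copy loop above using JDBC batching
            int row = 0;
            while (oracleRs.next()) {
                for (int c = 1; c <= md.getColumnCount(); c++) {
                    mySQLInsertStmt.setObject(c, oracleRs.getObject(c));
                }
                mySQLInsertStmt.addBatch();
                row++;
                if (row % 1000 == 0) {
                    mySQLInsertStmt.executeBatch(); // send 1000 inserts in one round trip
                }
            }
            mySQLInsertStmt.executeBatch(); // flush the remaining rows
            mySQLCon.commit();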

There is nothing special in the ConnectionFactory class (MYSQL_URL and ORACLE_URL are ordinary JDBC connection URLs defined elsewhere, e.g. as constants or loaded from a properties file):

public class ConnectionFactory {

    public static Connection getMySQLConnection() throws SQLException {
         return DriverManager.getConnection(MYSQL_URL);
    }

    public static Connection getOracleConnection() throws SQLException {
        return DriverManager.getConnection(ORACLE_URL);
    }
}

Executing a jar as a background job on Linux

Often I need to leave a long-running Java application working overnight. This is done with the help of the nohup command, which prevents the SIGHUP signal from reaching the command it precedes and thereby keeps the application running when the terminal session is terminated:

nohup java -jar CopyOracleToMySQL-1.0.jar > copy.out 2>&1 &

The STDOUT and STDERR output is saved to the file copy.out.

Tuesday, May 8, 2018

Copying a table to another MySQL database

Unfortunately, in MySQL there are no decent export tools similar to Data Pump in Oracle. Luckily one can copy either the entire database data folder or only the required individual tables. The text is adapted from the MySQL manual.

  1. In the source database, first generate the CREATE statement for the table being copied so that an identical table can be created in the target database. Then execute an SQL command:

    FLUSH TABLES TEMP FOR EXPORT;

    A TEMP.cfg file is created in the MySQL data directory. Together with the tablespace file TEMP.ibd, it has to be copied to the target machine. I first copy both files to /tmp so that they become more accessible.

    sudo cp TEMP.{ibd,cfg} /tmp
    cd /tmp
    chmod 644 TEMP.*
    

    Then execute an SQL command. The previously created TEMP.cfg disappears.

    UNLOCK TABLES;
  2. In the target database, execute SQL to create an identical table using the DDL from the source database and discard its tablespace:

    CREATE TABLE `TEMP` (
      `INTERVENTION_LIBELLE` varchar(93) NOT NULL,
      ...
      KEY `INTERVENTION_LIBELLE_idx` (`INTERVENTION_LIBELLE`)
    ) ENGINE=InnoDB DEFAULT CHARSET=latin1;
    ALTER TABLE TEMP DISCARD TABLESPACE;

    Copy the files from the source machine to the local MySQL data folder:

    cd /data1/mysql/axya/
    scp test@source-machine:/tmp/TEMP.{ibd,cfg} .
    chmod 640 TEMP.*
    chown mysql:mysql TEMP.*
    

    Execute the last SQL command after which the data becomes usable.

    ALTER TABLE TEMP IMPORT TABLESPACE;

How to cast varchar to int or datetime in MySQL

To convert a varchar value into an int:
update TEMP set PATIENT_ID=cast(PATIENT_ID_ORIG as UNSIGNED);
To convert a varchar value into datetime

One needs to use the troublesome function STR_TO_DATE. The problem with this function is that it never produces errors; instead, it produces NULL and a warning that has to be revealed by an additional statement:

SELECT STR_TO_DATE('05/11/2012 08:30:00','%d/%m/%Y %H:%i:%s');
2012-11-05 08:30:00

SELECT STR_TO_DATE('505/11/2012 08:30:00','%d/%m/%Y %H:%i:%s');
null

SHOW WARNINGS;
Warning 1411 Incorrect datetime value: '505/11/2012 08:30:00' for function str_to_date

There may also be unwanted conversions to zero dates. I converted valid values like this:

SET  sql_mode = 'ONLY_FULL_GROUP_BY,STRICT_TRANS_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION,STRICT_ALL_TABLES';
SELECT @@SESSION.sql_mode;
ONLY_FULL_GROUP_BY,STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION

update TEMP set ENTREE_SALLE =STR_TO_DATE(ENTREE_SALLE_ORIG,'%d/%m/%Y %H:%i:%s');

Tuesday, April 24, 2018

How to recover passwords in SQL developer

I just want to note down and share a useful link, https://github.com/maaaaz/sqldeveloperpassworddecryptor, to a python script that not only comes with a comprehensive user guide but also works. Before stumbling upon it, I had also tried some SQL Developer plugins that turned out not to work.

Monday, April 23, 2018

Change schema in Oracle

Even though I do it regularly, I always forget the commands.

To display the current user and the current schema:

SELECT sys_context('USERENV','SESSION_USER') as "USER NAME", sys_context('USERENV', 'CURRENT_SCHEMA') as "CURRENT SCHEMA" FROM dual;

To change the current schema, instead of prefixing the table names:

ALTER SESSION SET CURRENT_SCHEMA = hdm;

Add MySQL or Oracle driver to create a datasource in Wildfly 10

In any other server, one simply puts a jar into the library folder to have it included in the classpath. In contrast, in Wildfly there is no such folder: it is based on a modular classloading architecture. One needs to create an individual folder for any additional jar, which is called a module. The official documentation recommends including the required jar in the web archive instead of creating modules. However, this is impossible if one needs to create a datasource, which depends on a driver in the server classpath (a datasource can alternatively be deployed, which I find inconvenient).

  1. Create a new folder path $JBOSS_HOME/modules/system/layers/base/com/mysql/main

    Inside the created folder, create a file module.xml with contents:

    <?xml version="1.0" encoding="UTF-8"?>
    <module xmlns="urn:jboss:module:1.3" name="com.mysql">
         <resources>
            <resource-root path="mysql-connector-java-5.1.42-bin.jar"/>
         </resources>
          <dependencies>
       <module name="marian.mysqllogger"/> 
            <module name="javax.api"/> 
            <module name="javax.transaction.api"/>
        </dependencies>
    </module>

    Note, the module marian.mysqllogger is optional; it logs the executed SQL commands and is described in my previous posts.

  2. Create a new folder path $JBOSS_HOME/modules/system/layers/base/com/oracle/main

    Inside the created folder, create a file module.xml with contents:

    <?xml version="1.0" encoding="UTF-8"?>
    <module xmlns="urn:jboss:module:1.3" name="com.oracle">
         <resources>
            <resource-root path="ojdbc6.jar"/>
         </resources>
    
        <dependencies>
            <module name="javax.api"/> 
            <module name="javax.transaction.api"/>
        </dependencies>
    </module>
  3. Copy the MySQL driver jar indicated in the xml file (e.g. mysql-connector-java-5.1.42-bin.jar) into the folder $JBOSS_HOME/modules/system/layers/base/com/mysql/main. The Oracle driver indicated in the xml file (e.g. ojdbc6.jar) should likewise be placed into the folder $JBOSS_HOME/modules/system/layers/base/com/oracle/main next to its xml file.

  4. Finally, the datasources can be created in the Administration Console after Wildfly is restarted.

    Alternatively, you can directly edit $JBOSS_HOME/standalone/configuration/standalone.xml. Extend the part dedicated to datasources:

            <subsystem xmlns="urn:jboss:domain:datasources:4.0">
                <datasources>
                     <datasource jta="true" jndi-name="java:/OracleDS" pool-name="OracleDS" enabled="true" use-ccm="true">
                        <connection-url>jdbc:oracle:thin:@localhost:1521:orcl2</connection-url>
                        <driver-class>oracle.jdbc.driver.OracleDriver</driver-class>
                        <driver>oracle</driver>
                        <security>
                            <user-name>username</user-name>
                            <password>password</password>
                        </security>
                        <validation>
                            <valid-connection-checker class-name="org.jboss.jca.adapters.jdbc.extensions.oracle.OracleValidConnectionChecker"/>
                            <background-validation>true</background-validation>
                            <stale-connection-checker class-name="org.jboss.jca.adapters.jdbc.extensions.oracle.OracleStaleConnectionChecker"/>
                            <exception-sorter class-name="org.jboss.jca.adapters.jdbc.extensions.oracle.OracleExceptionSorter"/>
                        </validation>
                    </datasource>
                    <datasource jta="true" jndi-name="java:/MySqlDS" pool-name="MySqlDS" enabled="true" use-ccm="true">
                        <connection-url>jdbc:mysql://localhost:3306/wildfly?useSSL=false&amp;profileSQL=true&amp;logger=com.mysql.jdbc.log.MySlf4JLogger</connection-url>
                        <driver-class>com.mysql.jdbc.Driver</driver-class>
                        <driver>mysql</driver>
                        <security>
                            <user-name>username</user-name>
                            <password>password</password>
                        </security>
                        <validation>
                            <valid-connection-checker class-name="org.jboss.jca.adapters.jdbc.extensions.mysql.MySQLValidConnectionChecker"/>
                            <background-validation>true</background-validation>
                            <exception-sorter class-name="org.jboss.jca.adapters.jdbc.extensions.mysql.MySQLExceptionSorter"/>
                        </validation>
                    </datasource>
                    <drivers>
                        <driver name="oracle" module="com.oracle">
                            <driver-class>oracle.jdbc.driver.OracleDriver</driver-class>
                        </driver>
                        <driver name="mysql" module="com.mysql">
                            <driver-class>com.mysql.jdbc.Driver</driver-class>
                        </driver>
                    </drivers>
                </datasources>
            </subsystem>

    com.mysql.jdbc.log.MySlf4JLogger is a handy logger of executed SQL statements. It is loaded from marian.mysqllogger module.

    Restart the Wildfly.

Monday, March 26, 2018

Loading a long value into a String variable from a text file

The maximum length of a String seems to equal Integer.MAX_VALUE. However, the length of a string literal in a compiled class is stored in two bytes, implying that a literal cannot exceed 65535 bytes. If you try to compile a class with a longer literal, the error constant string too long occurs. Sometimes, for example in tests, one needs to use longer String values. Such values can be loaded from a file.

Suppose a String variable has to be assigned the entire contents of an output.xml file, which contains ~400,000 characters and is saved on the classpath. The method inputStreamToString loads the contents from an InputStream into a String variable:

public class FileUtils {

    public String inputStreamToString(InputStream is) throws IOException {
        StringBuilder sb = new StringBuilder();
        char[] buffer = new char[1024 * 8];
        try (BufferedReader in = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) {
            int length;
            while ((length = in.read(buffer)) > -1) {
                sb.append(buffer, 0, length);
            }
        }
        return sb.toString();
    }
}

A test class:

public class FileUtilsTest {

    FileUtils i = new FileUtils();

    @Test
    public void testInputStreamToStringFromResourceFile() throws IOException {
        String resp = i.inputStreamToString(getClass().getResourceAsStream("/output.xml"));
        System.out.println(resp.length());// 358,830
        assertEquals(resp.length(), 358779);
    }

    @Test
    public void testInputStreamToStringFromString() throws IOException {
        System.out.println(Charset.defaultCharset()); // windows-1252
        String str = "this is a test string to be converted to InputStream";
        String copy = i.inputStreamToString(new ByteArrayInputStream(str.getBytes(StandardCharsets.UTF_8)));
        assertEquals(str.length(), copy.length());
    }
}

Another, shorter possibility:

    public String readFileToString(String fileName) throws IOException {
        Path filePath = getPathInResources(fileName);
        return new String(Files.readAllBytes(filePath), StandardCharsets.UTF_8);
    }
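
The helper getPathInResources is not shown above; a minimal sketch of what it might look like, assuming the file is available on the classpath (e.g. under src/test/resources):

    public Path getPathInResources(String fileName) throws IOException {
        try {
            // resolve a classpath resource to a Path
            return Paths.get(getClass().getResource("/" + fileName).toURI());
        } catch (URISyntaxException e) {
            throw new IOException(e);
        }
    }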
How to convert a String to an InputStream
InputStream is=new ByteArrayInputStream(str.getBytes(StandardCharsets.UTF_8));

The code is used in the second test above.

How to save a String to a file
 Files.write(Paths.get("src\\test\\resources\\xml0.xml"),str.getBytes(StandardCharsets.UTF_8));

Wednesday, March 14, 2018

Changing font size and default language in SQL developer or Data Integrator

Changing the default tiny font size

By default the letters in SQL Developer or Data Integrator are hardly visible. The steps to increase the font size are much the same on Windows and Linux. In %userprofile%/AppData on Windows or $HOME on Linux, search for a file named ide.properties. One file will be found in the SQL Developer folder, whereas two files will be found in the ODI folder (no idea why, but it does not matter).

In the SQL Developer file, uncomment the line with Ide.FontSize=18 and set a convenient font size (at least 18).

Add the same line to the two ODI files, which seem identical on Linux and different on Windows.

Restart the applications.

Changing the default language to English

Java uses the default locale of the computer. To change the language, one needs to change JVM system variables. This can be done in the configuration file ide.conf located in the installation folder of SQL Developer or ODI, e.g. C:\oracle\Middleware\Oracle_Home. Add two lines to the end of the file and then restart the application:

AddVMOption -Duser.language=en
AddVMOption -Duser.country=US

Thursday, March 8, 2018

Adding a datasource to Tomcat and specifying it in persistence.xml

To add a datasource connecting to Oracle database, save the Oracle driver ojdbc7.jar into CATALINA_HOME/lib. Then add a line with the connection details into CATALINA_HOME/conf/context.xml:

<Resource name="jdbc/saphirOracleDB" auth="Container" type="javax.sql.DataSource"
        maxTotal="20" maxIdle="30" maxWait="10000"
        username="username" password="password" driverClassName="oracle.jdbc.OracleDriver"
        url="jdbc:oracle:thin:@//hostname:1555/servicename"/>

In a sample persistence.xml depending on the created datasource, the reference to the datasource is obtained using the JNDI name java:/comp/env/jdbc/saphirOracleDB:

<?xml version="1.0" encoding="UTF-8"?>
<persistence version="2.1" xmlns="http://xmlns.jcp.org/xml/ns/persistence" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://xmlns.jcp.org/xml/ns/persistence http://xmlns.jcp.org/xml/ns/persistence/persistence_2_1.xsd">
  <persistence-unit name="Saphir" transaction-type="RESOURCE_LOCAL">
    <provider>org.eclipse.persistence.jpa.PersistenceProvider</provider>
    <non-jta-data-source>java:/comp/env/jdbc/saphirOracleDB</non-jta-data-source>
    <class>entities.sqlresultmapping.DummyForMapping</class>
  </persistence-unit>
</persistence>

The entity manager can be obtained in the web application by using the specified persistence unit name:

    private static EntityManagerFactory emf = Persistence.createEntityManagerFactory("Saphir");
    private static EntityManager em = emf.createEntityManager();

Removing accents and other diacritical marks from unicode text so as to convert it into English letters

Often I need to convert Unicode text, e.g. French, into English letters. The general way to remove diacritical marks is to decompose each character into a char for the base letter and separate chars for the marks using Normalizer with form NFD, and then remove all chars holding diacritical marks using the regular expression \p{InCombiningDiacriticalMarks}+, which matches the "Combining Diacritical Marks" Unicode character block.

The sample class below uses as input a meaningless text made up of French words with various accents:

public class Clean   {

    static void describe(String str) {
        System.out.println(str + " " + str.length());
    }

    public static void main(String[] args) {
        String str = "«J'ai levé la tête. Il doit être français». Il n'a pensé à lui ôter l'âge et se met à nager âgé.";
        describe(str);
        String normalizedString = Normalizer.normalize(str, Normalizer.Form.NFD);
        // the regexp corresponds to Character.UnicodeBlock.COMBINING_DIACRITICAL_MARKS
        String noDiacriticalMarks = normalizedString.replaceAll("\\p{InCombiningDiacriticalMarks}+", "");
        describe(normalizedString);
        describe(noDiacriticalMarks);
    }
}

In the output, the first line is the original string. The second is the same string, but normalized; note that the accents are now stored as individual characters, which are eliminated in the third line. Each line ends with the length of the string.

«J'ai levé la tête. Il doit être français». Il n'a pensé à lui ôter l'âge et se met à nager âgé. 96
«J'ai levé la tête. Il doit être français». Il n'a pensé à lui ôter l'âge et se met à nager âgé. 107
«J'ai leve la tete. Il doit etre francais». Il n'a pense a lui oter l'age et se met a nager age. 96
Remove diacritical marks from a string in Javascript

An analogous approach in JavaScript, taken from here:

const str = "Crème Brulée"
console.log(str + "=>" + str.normalize('NFD').replace(/[\u0300-\u036f]/g, ""));

Sunday, February 18, 2018

Using Backbone Model to post its attributes together with files as multipart form data

Backbone is a tiny and very flexible library for REST-based front ends such as single-page applications. But I did not find many examples of how to save a model containing both text values and selected files. As the data contains files, it has to be encoded as multipart form data, including the binary files and the text attributes in JSON format. A possible short JavaScript implementation in the Backbone model is quite simple:

Backbone.Model.extend({
    selectedFiles: [], // the photos to be uploaded

    saveMultipart: function () {
        var formData = new FormData();
        this.selectedFiles.forEach(function (photo) {
            formData.append('photo', photo, photo.originalNameSize.nameWithoutExtension);
        });
        formData.append('dataObject', JSON.stringify(this.toJSON()));
        var options = {
            data: formData,
            contentType: false
        };
        this.save(null, options);
    }
    // other useful functions
});

To submit the model together with the files stored in selectedFiles, method saveMultipart should be called instead of the usual save.

In a JAX-RS-based REST backend, the JSON is extracted from the part (arbitrarily named dataObject here) and parsed into Java classes, whereas the files from the remaining parts are processed in some other way, as sketched below.
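
A minimal sketch of such a resource using RESTEasy's MultipartFormDataInput and InputPart; the part names dataObject and photo match the JavaScript above, everything else (class name, path, processing) is only illustrative:

@Path("/photos")
public class PhotoResource {

    @POST
    @Consumes(MediaType.MULTIPART_FORM_DATA)
    public Response upload(MultipartFormDataInput input) throws IOException {
        Map<String, List<InputPart>> parts = input.getFormDataMap();

        // the model attributes were appended under the (arbitrary) name "dataObject"
        String json = parts.get("dataObject").get(0).getBody(String.class, null);
        // parse the JSON into Java classes with the preferred mapper, e.g. Jackson

        // the files were appended under the name "photo"
        for (InputPart photoPart : parts.getOrDefault("photo", Collections.emptyList())) {
            InputStream photo = photoPart.getBody(InputStream.class, null);
            // resize or store the image as needed
        }
        return Response.ok().build();
    }
}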

A working sample application that is not at all Backbone-based but includes also an example of such a Backbone model-based data submission is stored here. It will be described in more detail in a post about client-side image resizing and uploading them to REST resources.

Saturday, February 17, 2018

Conversion between Date and LocalDateTime

I do not know why the conversion between Date and LocalDateTime is so complicated and ugly, but I have to do it increasingly often. So this note is on how to convert between Date, LocalDateTime and milliseconds.

public static void main(String[] args) {
    // convert from Date to LocalDateTime
    Date d = new Date();
    LocalDateTime ld = LocalDateTime.ofInstant(d.toInstant(), ZoneId.systemDefault());

    // convert from LocalDateTime to Date  
    Date d2 = Date.from(ld.atZone(ZoneId.systemDefault()).toInstant());
    System.out.println("date : " + d);
    System.out.println("ldate: " + ld);
    System.out.println("date2: " + d2);

    // compare milliseconds
    System.out.println("millis : " + d.getTime());
    System.out.println("lmillis: " + ld.atZone(ZoneId.systemDefault()).toInstant().toEpochMilli());
}

The output is:

date : Sat Feb 17 14:41:00 CET 2018
ldate: 2018-02-17T14:41:00.941
date2: Sat Feb 17 14:41:00 CET 2018

millis : 1518874860941
lmillis: 1518874860941

Thursday, February 1, 2018

CORS filter for JAX-RS

It should be registered as a singleton in the Application subclass (a registration sketch follows the filter code).

public class CORSResponseFilter implements ContainerResponseFilter {

    Logger logger = LoggerFactory.getLogger(getClass().getName());

    @Override
    public void filter(ContainerRequestContext requestContext, ContainerResponseContext responseContext) throws IOException {
        MultivaluedMap headers = responseContext.getHeaders();

        String origin = requestContext.getHeaderString("Origin");
        if (origin != null) {
            headers.add("Access-Control-Allow-Origin", requestContext.getHeaderString("Origin"));
        } 

        headers.add("Access-Control-Allow-Methods", "GET, POST, DELETE, PUT");
        headers.add("Access-Control-Allow-Headers", "X-Requested-With, Content-Type, X-Codingpedia");
        headers.add("Access-Control-Allow-Credentials", true);
    }
}
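
For reference, a minimal registration sketch in the Application subclass; the @ApplicationPath value and class name are placeholders matching the sample application shown later in this post:

@ApplicationPath("/api")
public class MyApplication extends Application {

    @Override
    public Set<Object> getSingletons() {
        Set<Object> singletons = new HashSet<>();
        singletons.add(new CORSResponseFilter());
        return singletons;
    }
}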

RESTEasy also provides a CORS filter class. I do not know why it is not only a response filter but also a request filter. It is used like any other filter but needs configuration of all the headers to be added.

CorsFilter filter = new CorsFilter();
filter.getAllowedOrigins().add("*");

Configuring Jackson object mapper in RESTEasy

While transforming between Java classes and JSON, the Jackson library considers both its own annotations and the conventional JAXB annotations. The final result may not be obvious. Let's consider a sample class from a sample application:

@XmlRootElement
@XmlAccessorType(XmlAccessType.PUBLIC_MEMBER)
public class MyBean {

    String firstName, lastName, fullName;

    public MyBean(String firstName, String lastName) {
        this.firstName = firstName;
        this.lastName = lastName;
    }

    public MyBean() {
    }

    @XmlElement(name = "jaxbFirstName")
    public String getFirstName() {
        return firstName;
    }

    public void setFirstName(String firstName) {
        this.firstName = firstName;
    }

    @JsonProperty("jacksonLastName")
    public String getLastName() {
        return lastName;
    }

    public void setLastName(String lastName) {
        this.lastName = lastName;
    }

    @XmlElement(name = "jaxbFullName")
    @JsonProperty("jacksonFullName")
    public String getFullName() {
        return firstName + " " + lastName;
    }

    public void setFullName(String fullName) {
        this.fullName = fullName;
    }
}

The Jackson instance set up by RESTEasy prefers its own annotations over the JAXB ones (note that a default object mapper ignores JAXB annotations entirely, see below). For the class above, the default output will be:

{"jaxbFirstName":"John","jacksonLastName":"Smith","jacksonFullName":"John Smith"}
Configuring Jackson used by JAX-RS

To configure Jackson, one has to provide one's own configured instance by means of a context provider implementing the ContextResolver interface. The provider produces an ObjectMapper instance (according to the authors it can be reused) that is then used by JAX-RS. The following class from another sample application provides an object mapper that produces nicely formatted JSON.

public class MyObjectMapperProvider implements ContextResolver<ObjectMapper> {

    ObjectMapper objectMapper = createObjectMapper();

    @Override
    public ObjectMapper getContext(final Class<?> type) {
        return objectMapper;
    }

    ObjectMapper createObjectMapper() {
        ObjectMapper objectMapper = new ObjectMapper();
        objectMapper.enable(SerializationFeature.INDENT_OUTPUT);
        return objectMapper;
    }
}

And the custom provider has to be registered as a singleton:

@ApplicationPath("/api")
public class MyApplication extends Application {

    public MyApplication() {
        singletons = new HashSet<Object>() {
            {
                add(new MyObjectMapperProvider());
            }
        };
        resources = new HashSet<Class<?>>() {
            {
                add(MyResource.class);
            }
        };
    }

    Set<Object> singletons;
    Set<Class<?>> resources;

    @Override
    // note, it is called twice during RESTEasy initialization, 
    public Set<Class<?>> getClasses() {
        System.out.println(">getClasses()");
        return resources;
    }

    @Override
    // note, it is called twice during RESTEasy initialization, 
    public Set<Object> getSingletons() {
        System.out.println(">getSingletons()");
        return singletons;
    }
}

The JSON received from the service is now formatted:

{
  "firstName" : "John",
  "jacksonLastName" : "Smith",
  "jacksonFullName" : "John Smith"
}

Note that, unlike the Jackson object mapper set up by RESTEasy, a default Jackson object mapper (created as above with new ObjectMapper()) does not recognize JAXB annotations.

Enabling JAXB annotations in Jackson object mapper

The customized object mapper instance has to be further configured in the context provider shown above:

    ObjectMapper createObjectMapper() {
        ObjectMapper objectMapper = new ObjectMapper();
        objectMapper.enable(SerializationFeature.INDENT_OUTPUT).registerModule(new JaxbAnnotationModule());
        return objectMapper;
    }

Now JAXB annotations are privileged over the Jackson ones in the produced JSON:

{
  "jaxbFirstName" : "John",
  "jacksonLastName" : "Smith",
  "jaxbFullName" : "John Smith"
}
Disabling unconventional Jackson annotations

The customized object mapper instance has to be further configured in the context provider shown above:

    ObjectMapper createObjectMapper() {
        ObjectMapper objectMapper = new ObjectMapper();
        objectMapper.enable(SerializationFeature.INDENT_OUTPUT).setAnnotationIntrospector(new JaxbAnnotationIntrospector());
        return objectMapper;
    }

Now Jackson annotations are ignored in the produced JSON:

{
  "lastName" : "Smith",
  "jaxbFirstName" : "John",
  "jaxbFullName" : "John Smith"
}
Ignore empty properties during serialization

Another useful setting prevents nulls and empty collections from being included in the resulting JSON.

public class MyObjectMapperProvider implements ContextResolver<ObjectMapper> {
    
    static ObjectMapper objectMapper = createObjectMapper();

    @Override
    public ObjectMapper getContext(final Class<?> type) {
        return objectMapper;
    }
    
    static ObjectMapper createObjectMapper() {
        ObjectMapper objectMapper = new ObjectMapper();
        objectMapper.enable(SerializationFeature.INDENT_OUTPUT).setAnnotationIntrospector(new JaxbAnnotationIntrospector()).setSerializationInclusion(JsonInclude.Include.NON_EMPTY);
        return objectMapper;
    }
    
    public static ObjectMapper getObjectMapper() {
        return objectMapper;
    }
}

Sign in with Google into a web application using the server flow.

This post is based on the Google documentation on Google's OAuth 2.0 authentication, where the OpenID Connect section seems to be the most pertinent and comprehensive. But overall the documentation is quite confusing, so I summarize it here. A sample Java web application is on GitHub.

First, obtain OAuth 2.0 credentials and set redirect URIs in the Google API Console:

Authentication comes down to obtaining an ID token from Google via HTTPS. The two most commonly used approaches for authenticating a user are what the Google documentation calls the server/basic flow and the implicit flow:

  • The server/basic flow allows the back-end server of an application to identify the user.
  • The implicit flow is when a client-side JavaScript app accesses APIs directly and not via its back-end server.

The major difference is that in the implicit flow tokens are sent in the URL hash, whereas in the server flow they are sent as URL parameters. Also, unlike the implicit flow, the server flow requires the client secret. Here I illustrate the server flow for authentication; the implicit flow using the Google API JavaScript library was demonstrated in a previous post.

When a user tries to sign in with Google, the application has to:

  1. Send an authentication request with the appropriate parameters to Google authorization_endpoint.
    • client_id from the API Console.
    • response_type should be code, which launches a basic flow. If the value is token id_token or id_token token, an implicit flow is launched, requiring JavaScript at the redirect URI to retrieve the tokens from the URI #fragment.
    • nonce A random value generated by your app that enables replay protection.
    • scope should be openid email. The scope value must begin with the string openid and then include profile or email or both.
    • redirect_uri the URL to which the browser will be redirected by Google after the user completes the authorization flow. The URL must exactly match one of the redirect_uri values listed in the API Console; even a trailing slash matters.
    • state should include the value of the anti-forgery unique session token, as well as any other information needed to recover the context when the user returns to your application, e.g., the starting URL.

    A sample URL from the link, which is presented as a button in my sample application:

    https://accounts.google.com/o/oauth2/v2/auth?client_id=517342657945-qv1ltq618ijj9edusgnnbpmbghkatc2q.apps.googleusercontent.com&redirect_uri=http://localhost:8080/test/server&scope=email&response_type=code&nonce=1845254249&state=u72lptpmf78lqv7nuid23l8hfa

    Google handles the user authentication and user consent. After a user signs in to Google, the browser is redirected to the indicated url with two appended parameters:

    http://localhost:8080/test/server?state=u72lptpmf78lqv7nuid23l8hfa&code=4/H3hLypL85UqpnKUT3po5vWIeYyZD4oPBjNyGk_rcYNI#

    Note, if a user has one Gmail account and is logged in, the user will not see any Google consent page and will be automatically redirected. But if the user has several accounts or is logged out, he has to choose one or log in on the Google page.

    If the user approves the access request, an authorization code is added to redirect_uri. Otherwise, the response contains an error message. Either the authorization code or the error message appears in the query string.

  2. Confirm that the state received from Google matches the state value sent in the original request.
  3. Exchange the authorization code for an access token and ID token.

    The response includes a one-time code parameter that can be exchanged for an access token and an ID token. For that, the server sends a POST request to the token_endpoint. The request must include the following parameters in the POST body:

    • code the received authorization code
    • client_id from the API Console
    • client_secret from the API Console
    • redirect_uri specified in the API Console
    • grant_type equals authorization_code

    A sample request by my sample application:

    POST https://www.googleapis.com/oauth2/v4/token
    
    code=4/H3hLypL85UqpnKUT3po5vWIeYyZD4oPBjNyGk_rcYNI&client_id=517342657945-qv1ltq618ijj9edusgnnbpmbghkatc2q.apps.googleusercontent.com&client_secret=0PVwYgPuLpH3PFHljPkbtJeP&redirect_uri=http://localhost:8080/test/server&grant_type=authorization_code

    A successful response includes a JSON with fields:

    • access_token A token that can be sent to a Google API.
    • id_token containing the information about the user
    • expires_in The remaining lifetime of the access token.
    • token_type always has the value Bearer.

    The response to the request above was:

    {"access_token":"ya29.GlxTBVKmo1YcUb_gyYstxB1Q-YpYzVviVp-uJKvU6CNfyhGUtD8oZJhliX9YADuKjebSZFxK1yL--TRxW_POT5vyBh9L43tlmzERrU8cwSSkl9U3n0zkY4nbHcnvoA","token_type":"Bearer","expires_in":3600,"id_token":"eyJhbGciOiJSUzI1NiIsImtpZCI6IjI2YzAxOGIyMzNmZTJlZWY0N2ZlZGJiZGQ5Mzk4MTcwZmM5YjI5ZDgifQ.eyJhenAiOiI1MTczNDI2NTc5NDUtcXYxbHRxNjE4aWpqOWVkdXNnbm5icG1iZ2hrYXRjMnEuYXBwcy5nb29nbGV1c2VyY29udGVudC5jb20iLCJhdWQiOiI1MTczNDI2NTc5NDUtcXYxbHRxNjE4aWpqOWVkdXNnbm5icG1iZ2hrYXRjMnEuYXBwcy5nb29nbGV1c2VyY29udGVudC5jb20iLCJzdWIiOiIxMTY4NjczMDQ5NzEyODQ3OTg1NjYiLCJlbWFpbCI6Im1hcmlhbi5jYWlrb3Zza2lAZ21haWwuY29tIiwiZW1haWxfdmVyaWZpZWQiOnRydWUsImF0X2hhc2giOiJLQ2tlX051V1NXMzRLYng5XzhtRm9BIiwibm9uY2UiOiIxODQ1MjU0MjQ5IiwiZXhwIjoxNTE3NDM2MjYwLCJpc3MiOiJodHRwczovL2FjY291bnRzLmdvb2dsZS5jb20iLCJpYXQiOjE1MTc0MzI2NjB9.MR6tnc5qnnL1VZmmONp4brWj5C9FQqIopIqa-UPc9NMz_qbAP37VpCSLn3CDdUouXCG7XpjTQbXZRvg9ZUP9-v_J5K9Crgp75csrVIBoWVre_yjncoFusEAc0efQOLUFwyKLV6cUsTMiUVoAkwWG6tOe_ZwXshq3-psblqpwwJxyILFFE2QiJviLH622S9YPBv0LA-tdTeqXOzt7yAK_cBeY-dnXXJfwVErY0yCGFAGOWf3WtTtzaWzxBshpMae9jRazsd3qgyiVtahD8IlfPUHhpYzDZs0RKuXBbfBF_wuB-cfyNhtuAdJfdaVNcWGLqqkQ4qkJGFhP8L5VMe1U_g"}
    
  4. Obtain user information from the ID token

    An ID token is a JWT (JSON Web Token): a signed, Base64-encoded JSON object. Since it is received directly from Google over HTTPS, it does not need to be validated. The decoded JSON contains the following fields:

    • email The user's email address provided only if your scope included email
    • profile The URL of the user's profile page provided when scope included profile
    • name The user's full name, in a displayable form provided when scope included profile
    • nonce The value of the nonce supplied by your app in the authentication request. You should enforce protection against replay attacks by ensuring it is presented only once.

    A simple Java method to extract the email from an ID token:

    public JsonObject decodeIdToken(String idToken) {
        String secondStr = idToken.split("\\.")[1];
        byte[] payloadBytes = Base64.getDecoder().decode(secondStr);
        String json = new String(payloadBytes);
        JsonReader jsonReader = Json.createReader(new StringReader(json));
        return jsonReader.readObject();
    }

    The id token above was decoded by this method into:

    {"azp":"517342657945-qv1ltq618ijj9edusgnnbpmbghkatc2q.apps.googleusercontent.com","aud":"517342657945-qv1ltq618ijj9edusgnnbpmbghkatc2q.apps.googleusercontent.com","sub":"116867304971284798566","email":"marian.caikovski@gmail.com","email_verified":true,"at_hash":"KCke_NuWSW34Kbx9_8mFoA","nonce":"1845254249","exp":1517436260,"iss":"https://accounts.google.com","iat":1517432660}
    
  5. Authenticate the user in your application.

To keep my sample application simple, all these steps are done in a servlet. If any check fails, an error message is displayed together with a sign-in link. If the user successfully logs in, his email is displayed. The email suffices for authentication in the back end.

@WebServlet(name = "MyAuthServlet", urlPatterns = {"/server"})
public class MyAuthServlet extends HttpServlet {

    OpenId openId = new OpenId();

    protected void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        response.setContentType("text/html;charset=UTF-8");
        String code = request.getParameter(CODE);
        HttpSession session = request.getSession();
        String receivedState = request.getParameter(STATE); 
        String savedState = (String) session.getAttribute(STATE);
        String newState = openId.getState();
        session.setAttribute(STATE, newState);
        String savedNonce = (String) session.getAttribute(NONCE);
        String newNonce = openId.getNonce();
        session.setAttribute(NONCE, newNonce);

        try (PrintWriter out = response.getWriter()) {
            if (code != null) {
                if (savedState.equals(receivedState)) {
                    String idToken = openId.exchangeCodeForToken(code);
                    if (idToken != null) {
                        JsonObject json = openId.decodeIdToken(idToken);
                        String receivedNonce = json.getString(NONCE);
                        if (savedNonce.equals(receivedNonce)) {
                            String email = json.getString(EMAIL);
                            out.println("<p>Hello " + email + "</p>");
                            return;
                        } else {
                            out.println("Nonces differ");
                        }
                    } else {
                        out.println("Id token is missing");
                    }
                } else {
                    out.println("States are different");
                }
            } else {
                out.println("<p>Code is null</p>");
            }
            out.println("<a href='" + openId.getUrl(newState, newNonce) + "'>Click to sign in</a>");
        }
    }
}
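
The OpenId helper class is not listed in this post (it is in the GitHub sample). For completeness, a minimal sketch of what exchangeCodeForToken might look like, reusing the HttpURLConnection POST approach from the next post below; CLIENT_ID, CLIENT_SECRET and REDIRECT_URI stand for the values from the API Console, and the JSON parsing uses javax.json as in decodeIdToken above:

    public String exchangeCodeForToken(String code) throws IOException {
        // CLIENT_ID, CLIENT_SECRET and REDIRECT_URI are the values from the API Console
        String params = "code=" + code
                + "&client_id=" + CLIENT_ID
                + "&client_secret=" + CLIENT_SECRET
                + "&redirect_uri=" + REDIRECT_URI
                + "&grant_type=authorization_code";

        HttpURLConnection con = (HttpURLConnection) new URL("https://www.googleapis.com/oauth2/v4/token").openConnection();
        con.setRequestMethod("POST");
        con.setDoOutput(true);
        try (OutputStreamWriter out = new OutputStreamWriter(con.getOutputStream())) {
            out.write(params);
        }

        // the response is a JSON object; return the value of its id_token field (or null on error)
        try (JsonReader reader = Json.createReader(con.getInputStream())) {
            return reader.readObject().getString("id_token", null);
        }
    }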

Wednesday, January 31, 2018

POST with HttpUrlConnection

A short note on POST requests:

public InputStream post(String url, String params) throws IOException {
    URL u = new URL(url);
    HttpURLConnection con = (HttpURLConnection) u.openConnection();
    con.setRequestMethod("POST");
    con.setDoOutput(true);
    try (OutputStreamWriter out = new OutputStreamWriter(con.getOutputStream())) {
        out.write(params);
    }

    return con.getInputStream();
}

A GET request is much simpler:

    String getAmazonHostName() throws  IOException {
        URL url = new URL("http://169.254.169.254/latest/meta-data/public-hostname");
        try (BufferedReader in = new BufferedReader( new InputStreamReader(url.openStream()))) {
            String inputLine = in.readLine();
            System.out.println("amazon public hostname: " + inputLine);
            return inputLine;
        }
    }